Dataset columns and value-length ranges (from the dataset viewer):

| Column | Field type | Min | Max |
|:----------------|:-------|------:|------:|
| sha | string | 40 | 40 |
| text | string | 1 | 13.4M |
| id | string | 2 | 117 |
| tags | list | 1 | 7.91k |
| created_at | string | 25 | 25 |
| metadata | string | 2 | 875k |
| last_modified | string | 25 | 25 |
| arxiv | list | 0 | 25 |
| languages | list | 0 | 7.91k |
| tags_str | string | 17 | 159k |
| text_str | string | 1 | 447k |
| text_lists | list | 0 | 352 |
| processed_texts | list | 1 | 353 |
| tokens_length | list | 1 | 353 |
| input_texts | list | 1 | 40 |
c96033aba17e71bae2850e86bcf56f188bb8f0f8
# Dataset Card for "fairness_doctor_google_flan_t5_xxl_mode_T_SPECIFIC_A_ns_4800" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
CVasNLPExperiments/fairness_doctor_google_flan_t5_xxl_mode_T_SPECIFIC_A_ns_4800
[ "region:us" ]
2023-08-18T06:29:29+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "prompt", "dtype": "string"}, {"name": "true_label", "dtype": "string"}, {"name": "scores", "sequence": "float64"}, {"name": "prediction", "dtype": "string"}], "splits": [{"name": "fewshot_0__Attributes_LAION_ViT_H_14_2B_descriptors_text_davinci_003_full_clip_tags_LAION_ViT_H_14_2B_simple_specific_rices", "num_bytes": 2154061, "num_examples": 4800}], "download_size": 253341, "dataset_size": 2154061}}
2023-08-18T06:29:33+00:00
[]
[]
TAGS #region-us
# Dataset Card for "fairness_doctor_google_flan_t5_xxl_mode_T_SPECIFIC_A_ns_4800" More Information needed
[ "# Dataset Card for \"fairness_doctor_google_flan_t5_xxl_mode_T_SPECIFIC_A_ns_4800\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"fairness_doctor_google_flan_t5_xxl_mode_T_SPECIFIC_A_ns_4800\"\n\nMore Information needed" ]
[ 6, 40 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"fairness_doctor_google_flan_t5_xxl_mode_T_SPECIFIC_A_ns_4800\"\n\nMore Information needed" ]
78ec70752295d2fa693930a657be804c486390cb
# Dataset Card for "fairness_firefighter_google_flan_t5_xxl_mode_T_SPECIFIC_A_ns_4800" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
CVasNLPExperiments/fairness_firefighter_google_flan_t5_xxl_mode_T_SPECIFIC_A_ns_4800
[ "region:us" ]
2023-08-18T06:45:28+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "prompt", "dtype": "string"}, {"name": "true_label", "dtype": "string"}, {"name": "scores", "sequence": "float64"}, {"name": "prediction", "dtype": "string"}], "splits": [{"name": "fewshot_0__Attributes_LAION_ViT_H_14_2B_descriptors_text_davinci_003_full_clip_tags_LAION_ViT_H_14_2B_simple_specific_rices", "num_bytes": 2480232, "num_examples": 4800}], "download_size": 183869, "dataset_size": 2480232}}
2023-08-18T06:45:32+00:00
[]
[]
TAGS #region-us
# Dataset Card for "fairness_firefighter_google_flan_t5_xxl_mode_T_SPECIFIC_A_ns_4800" More Information needed
[ "# Dataset Card for \"fairness_firefighter_google_flan_t5_xxl_mode_T_SPECIFIC_A_ns_4800\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"fairness_firefighter_google_flan_t5_xxl_mode_T_SPECIFIC_A_ns_4800\"\n\nMore Information needed" ]
[ 6, 40 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"fairness_firefighter_google_flan_t5_xxl_mode_T_SPECIFIC_A_ns_4800\"\n\nMore Information needed" ]
5dfdb7610c08e05f7368b7b106cf5bd27c2027cd
# Dataset Card for "deepfashion_densepose" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
HuangHaoyang/deepfashion_densepose
[ "region:us" ]
2023-08-18T06:48:28+00:00
{"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "canny", "dtype": "image"}, {"name": "image_caption", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 439222.0, "num_examples": 1}], "download_size": 441487, "dataset_size": 439222.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-08-18T08:25:20+00:00
[]
[]
TAGS #region-us
# Dataset Card for "deepfashion_densepose" More Information needed
[ "# Dataset Card for \"deepfashion_densepose\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"deepfashion_densepose\"\n\nMore Information needed" ]
[ 6, 15 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"deepfashion_densepose\"\n\nMore Information needed" ]
957cbac9215dd349e1e196a6da9dc975c5d71346
# Dataset Card for "fairness_pilot_google_flan_t5_xxl_mode_T_SPECIFIC_A_ns_4800" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
CVasNLPExperiments/fairness_pilot_google_flan_t5_xxl_mode_T_SPECIFIC_A_ns_4800
[ "region:us" ]
2023-08-18T06:53:03+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "prompt", "dtype": "string"}, {"name": "true_label", "dtype": "string"}, {"name": "scores", "sequence": "float64"}, {"name": "prediction", "dtype": "string"}], "splits": [{"name": "fewshot_0__Attributes_LAION_ViT_H_14_2B_descriptors_text_davinci_003_full_clip_tags_LAION_ViT_H_14_2B_simple_specific_rices", "num_bytes": 2109342, "num_examples": 4800}], "download_size": 309678, "dataset_size": 2109342}}
2023-08-18T06:53:08+00:00
[]
[]
TAGS #region-us
# Dataset Card for "fairness_pilot_google_flan_t5_xxl_mode_T_SPECIFIC_A_ns_4800" More Information needed
[ "# Dataset Card for \"fairness_pilot_google_flan_t5_xxl_mode_T_SPECIFIC_A_ns_4800\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"fairness_pilot_google_flan_t5_xxl_mode_T_SPECIFIC_A_ns_4800\"\n\nMore Information needed" ]
[ 6, 39 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"fairness_pilot_google_flan_t5_xxl_mode_T_SPECIFIC_A_ns_4800\"\n\nMore Information needed" ]
466d7a2874495828a1577a2dcd6e5451aabae9e1
# Dataset of clownpiece/クラウンピース (Touhou)

This is the dataset of clownpiece/クラウンピース (Touhou), containing 500 images and their tags. The core tags of this character are `blonde_hair, long_hair, hat, jester_cap, wings, fairy_wings, purple_headwear, red_eyes, bangs, very_long_hair, hair_between_eyes, polka_dot_headwear, pink_eyes, breasts`, which are pruned in this dataset.

Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by the [DeepGHS Team](https://github.com/deepghs) ([huggingface organization](https://huggingface.co/deepghs)).

## List of Packages

| Name | Images | Size | Download | Type | Description |
|:-----------------|---------:|:-----------|:-------------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------|
| raw | 500 | 731.93 MiB | [Download](https://huggingface.co/datasets/CyberHarem/clownpiece_touhou/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). |
| 800 | 500 | 391.96 MiB | [Download](https://huggingface.co/datasets/CyberHarem/clownpiece_touhou/resolve/main/dataset-800.zip) | IMG+TXT | Dataset with the shorter side not exceeding 800 pixels. |
| stage3-p480-800 | 1222 | 859.37 MiB | [Download](https://huggingface.co/datasets/CyberHarem/clownpiece_touhou/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |
| 1200 | 500 | 637.91 MiB | [Download](https://huggingface.co/datasets/CyberHarem/clownpiece_touhou/resolve/main/dataset-1200.zip) | IMG+TXT | Dataset with the shorter side not exceeding 1200 pixels. |
| stage3-p480-1200 | 1222 | 1.22 GiB | [Download](https://huggingface.co/datasets/CyberHarem/clownpiece_touhou/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |

### Load Raw Dataset with Waifuc

We provide the raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need it, just run the following code:

```python
import os
import zipfile

from huggingface_hub import hf_hub_download
from waifuc.source import LocalSource

# download the raw archive file
zip_file = hf_hub_download(
    repo_id='CyberHarem/clownpiece_touhou',
    repo_type='dataset',
    filename='dataset-raw.zip',
)

# extract the files to your directory
dataset_dir = 'dataset_dir'
os.makedirs(dataset_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
    zf.extractall(dataset_dir)

# load the dataset with waifuc
source = LocalSource(dataset_dir)
for item in source:
    print(item.image, item.meta['filename'], item.meta['tags'])
```

## List of Clusters

List of tag clustering results; some outfits may be mined here.
### Raw Text Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | 0 | 36 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, american_flag_dress, american_flag_legwear, polka_dot, short_sleeves, solo, looking_at_viewer, neck_ruff, smile, open_mouth, torch, star_print, fire, holding, striped_pantyhose, striped_dress, purple_eyes | | 1 | 7 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, american_flag_dress, american_flag_legwear, blush, fairy, full_body, polka_dot, short_sleeves, signature, solo, star_print, striped_dress, striped_pantyhose, open_mouth, fang, smile, pink_headwear, simple_background | | 2 | 9 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | 1girl, american_flag_dress, blush_stickers, chibi, full_body, neck_ruff, open_mouth, polka_dot, short_sleeves, solo, star_print, striped_dress, striped_pants, :d, standing, fairy, american_flag_legwear | | 3 | 7 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | 1girl, american_flag_bikini, blush, looking_at_viewer, navel, small_breasts, solo, polka_dot, micro_bikini, open_mouth, smile, star_print, striped, white_background, no_wings, pink_headwear, simple_background, standing | | 4 | 7 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | 1girl, looking_at_viewer, navel, nipples, small_breasts, solo, blush, polka_dot, pussy, smile, completely_nude, simple_background, bar_censor, cowboy_shot, loli, transparent_wings, pink_headwear, standing, white_background | ### Table Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | american_flag_dress | american_flag_legwear | polka_dot | short_sleeves | solo | looking_at_viewer | neck_ruff | smile | open_mouth | torch | star_print | fire | holding | striped_pantyhose | striped_dress | purple_eyes | blush | fairy | full_body | signature | fang | pink_headwear | simple_background | blush_stickers | chibi | striped_pants | :d | standing | american_flag_bikini | navel | small_breasts | micro_bikini | striped | white_background | no_wings | nipples | pussy | completely_nude | bar_censor | cowboy_shot | loli | transparent_wings | 
|----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:----------------------|:------------------------|:------------|:----------------|:-------|:--------------------|:------------|:--------|:-------------|:--------|:-------------|:-------|:----------|:--------------------|:----------------|:--------------|:--------|:--------|:------------|:------------|:-------|:----------------|:--------------------|:-----------------|:--------|:----------------|:-----|:-----------|:-----------------------|:--------|:----------------|:---------------|:----------|:-------------------|:-----------|:----------|:--------|:------------------|:-------------|:--------------|:-------|:--------------------| | 0 | 36 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | 1 | 7 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | X | X | X | X | X | | | X | X | | X | | | X | X | | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | 2 | 9 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | X | X | X | X | X | X | | X | | X | | X | | | | X | | | X | X | | | | | X | X | X | X | X | | | | | | | | | | | | | | | | 3 | 7 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | X | | | X | | X | X | | X | X | | X | | | | | | X | | | | | X | X | | | | | X | X | X | X | X | X | X | X | | | | | | | | | 4 | 7 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | X | | | X | | X | X | | X | | | | | | | | | X | | | | | X | X | | | | | X | | X | X | | | X | | X | X | X | X | X | X | X |
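The package table earlier in this card also lists IMG+TXT archives alongside the waifuc-loadable raw one; below is a minimal sketch of fetching and reading the 800px package (the image/.txt pairing inside the archive is an assumption, not verified here):

```python
import os
import zipfile

from huggingface_hub import hf_hub_download

# download the 800px IMG+TXT package named in the package table
zip_file = hf_hub_download(
    repo_id='CyberHarem/clownpiece_touhou',
    repo_type='dataset',
    filename='dataset-800.zip',
)

# unpack it into a local directory
dataset_dir = 'dataset_800'
os.makedirs(dataset_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
    zf.extractall(dataset_dir)

# assumed layout: each image ships with a same-named .txt file holding its tags
for name in sorted(os.listdir(dataset_dir)):
    if name.endswith('.txt'):
        with open(os.path.join(dataset_dir, name), encoding='utf-8') as f:
            print(name, '->', f.read().strip()[:80])
```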
CyberHarem/clownpiece_touhou
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-08-18T07:13:13+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2024-01-14T19:36:08+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of clownpiece/クラウンピース (Touhou) ====================================== This is the dataset of clownpiece/クラウンピース (Touhou), containing 500 images and their tags. The core tags of this character are 'blonde\_hair, long\_hair, hat, jester\_cap, wings, fairy\_wings, purple\_headwear, red\_eyes, bangs, very\_long\_hair, hair\_between\_eyes, polka\_dot\_headwear, pink\_eyes, breasts', which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by DeepGHS Team(huggingface organization). List of Packages ---------------- ### Load Raw Dataset with Waifuc We provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code List of Clusters ---------------- List of tag clustering result, maybe some outfits can be mined here. ### Raw Text Version ### Table Version
[ "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n", "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ 44, 61, 5, 4 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.### Raw Text Version### Table Version" ]
1da46c73734cea6335b9d45eb3444b4ee17fe081
# Dataset of motoori_kosuzu/本居小鈴 (Touhou)

This is the dataset of motoori_kosuzu/本居小鈴 (Touhou), containing 500 images and their tags. The core tags of this character are `hair_ornament, short_hair, two_side_up, red_eyes, orange_hair, bangs, red_hair`, which are pruned in this dataset.

Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by the [DeepGHS Team](https://github.com/deepghs) ([huggingface organization](https://huggingface.co/deepghs)).

## List of Packages

| Name | Images | Size | Download | Type | Description |
|:-----------------|---------:|:-----------|:-------------------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------|
| raw | 500 | 624.36 MiB | [Download](https://huggingface.co/datasets/CyberHarem/motoori_kosuzu_touhou/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). |
| 800 | 500 | 390.26 MiB | [Download](https://huggingface.co/datasets/CyberHarem/motoori_kosuzu_touhou/resolve/main/dataset-800.zip) | IMG+TXT | Dataset with the shorter side not exceeding 800 pixels. |
| stage3-p480-800 | 1144 | 792.03 MiB | [Download](https://huggingface.co/datasets/CyberHarem/motoori_kosuzu_touhou/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |
| 1200 | 500 | 568.44 MiB | [Download](https://huggingface.co/datasets/CyberHarem/motoori_kosuzu_touhou/resolve/main/dataset-1200.zip) | IMG+TXT | Dataset with the shorter side not exceeding 1200 pixels. |
| stage3-p480-1200 | 1144 | 1.03 GiB | [Download](https://huggingface.co/datasets/CyberHarem/motoori_kosuzu_touhou/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |

### Load Raw Dataset with Waifuc

We provide the raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need it, just run the following code:

```python
import os
import zipfile

from huggingface_hub import hf_hub_download
from waifuc.source import LocalSource

# download the raw archive file
zip_file = hf_hub_download(
    repo_id='CyberHarem/motoori_kosuzu_touhou',
    repo_type='dataset',
    filename='dataset-raw.zip',
)

# extract the files to your directory
dataset_dir = 'dataset_dir'
os.makedirs(dataset_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
    zf.extractall(dataset_dir)

# load the dataset with waifuc
source = LocalSource(dataset_dir)
for item in source:
    print(item.image, item.meta['filename'], item.meta['tags'])
```

## List of Clusters

List of tag clustering results; some outfits may be mined here.
### Raw Text Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | 0 | 11 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, apron, checkered_kimono, hair_bell, solo, wide_sleeves, jingle_bell, looking_at_viewer, smile, character_name, clothes_writing, checkered_shirt, book | | 1 | 11 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, apron, hair_bell, smile, solo, wide_sleeves, character_name, clothes_writing, blush, book, jingle_bell, long_sleeves, lace-up_boots, looking_at_viewer, checkered_kimono, shirt | | 2 | 14 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | 1girl, checkered_kimono, hair_bell, jingle_bell, long_sleeves, solo, wide_sleeves, holding_book, looking_at_viewer, smile, boots, green_skirt, full_body, character_name, yellow_apron, clothes_writing, brown_footwear, cross-laced_footwear, open_mouth, frills, simple_background | | 3 | 10 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | 1girl, checkered_kimono, hair_bell, jingle_bell, long_sleeves, looking_at_viewer, open_mouth, solo, wide_sleeves, :d, frills, yellow_apron, blush, green_skirt, hair_between_eyes, holding_book, character_name, clothes_writing | | 4 | 7 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | 1girl, checkered_kimono, green_skirt, hair_bell, holding_book, jingle_bell, long_sleeves, solo, wide_sleeves, yellow_apron, closed_mouth, frills, looking_at_viewer, simple_background, smile, shirt, boots, brown_footwear, clothes_writing, white_background | | 5 | 5 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | 1girl, checkered_kimono, closed_mouth, hair_bell, jingle_bell, long_sleeves, looking_at_viewer, simple_background, solo, yellow_apron, character_name, clothes_writing, orange_eyes, smile, upper_body, wide_sleeves, one-hour_drawing_challenge, white_background, glasses, round_eyewear | ### Table Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | apron | checkered_kimono | hair_bell | solo | wide_sleeves | jingle_bell | looking_at_viewer | smile | character_name | clothes_writing | checkered_shirt | book | blush | long_sleeves | lace-up_boots | shirt | holding_book | boots | green_skirt | full_body | yellow_apron | brown_footwear | cross-laced_footwear | open_mouth | frills | simple_background | :d | hair_between_eyes | closed_mouth | white_background | orange_eyes | 
upper_body | one-hour_drawing_challenge | glasses | round_eyewear | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:--------|:-------------------|:------------|:-------|:---------------|:--------------|:--------------------|:--------|:-----------------|:------------------|:------------------|:-------|:--------|:---------------|:----------------|:--------|:---------------|:--------|:--------------|:------------|:---------------|:-----------------|:-----------------------|:-------------|:---------|:--------------------|:-----|:--------------------|:---------------|:-------------------|:--------------|:-------------|:-----------------------------|:----------|:----------------| | 0 | 11 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | 1 | 11 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | X | X | X | X | X | X | X | X | X | X | | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | 2 | 14 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | X | | X | X | X | X | X | X | X | X | X | | | | X | | | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | 3 | 10 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | X | | X | X | X | X | X | X | | X | X | | | X | X | | | X | | X | | X | | | X | X | | X | X | | | | | | | | | 4 | 7 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | X | | X | X | X | X | X | X | X | | X | | | | X | | X | X | X | X | | X | X | | | X | X | | | X | X | | | | | | | 5 | 5 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | X | | X | X | X | X | X | X | X | X | X | | | | X | | | | | | | X | | | | | X | | | X | X | X | X | X | X | X |
CyberHarem/motoori_kosuzu_touhou
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-08-18T07:16:55+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2024-01-14T22:20:36+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of motoori\_kosuzu/本居小鈴 (Touhou) ======================================== This is the dataset of motoori\_kosuzu/本居小鈴 (Touhou), containing 500 images and their tags. The core tags of this character are 'hair\_ornament, short\_hair, two\_side\_up, red\_eyes, orange\_hair, bangs, red\_hair', which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by DeepGHS Team(huggingface organization). List of Packages ---------------- ### Load Raw Dataset with Waifuc We provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code List of Clusters ---------------- List of tag clustering result, maybe some outfits can be mined here. ### Raw Text Version ### Table Version
[ "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n", "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ 44, 61, 5, 4 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.### Raw Text Version### Table Version" ]
9da61fd253654914524f7eba74b0fa6461c0f974
This is a subset of a [larger dataset](https://www.kaggle.com/datasets/rmisra/imdb-spoiler-dataset) for classifying whether a movie review is a spoiler or not. It is used to fine-tune the [roberta-base](https://huggingface.co/roberta-base) model for text classification; [check out the fine-tuned model!](https://huggingface.co/bhavyagiri/roberta-base-finetuned-imdb-spoilers)
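A minimal sketch of pairing this dataset with the fine-tuned classifier linked above; the split and column names are assumptions, since the card does not list them:

```python
from datasets import load_dataset
from transformers import pipeline

ds = load_dataset("bhavyagiri/imdb-spoiler", split="train")   # split name assumed

clf = pipeline(
    "text-classification",
    model="bhavyagiri/roberta-base-finetuned-imdb-spoilers",
)

review = ds[0]["text"]          # "text" column name assumed
print(clf(review[:512]))        # e.g. [{'label': ..., 'score': ...}]
```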
bhavyagiri/imdb-spoiler
[ "license:apache-2.0", "region:us" ]
2023-08-18T07:21:21+00:00
{"license": "apache-2.0"}
2023-08-20T08:15:52+00:00
[]
[]
TAGS #license-apache-2.0 #region-us
This is a subset of a large-dataset for classifying whether a movie review is a spoiler or not. It's used to fine-tune roberta-base model for Text-Classification Model, Check it out!
[]
[ "TAGS\n#license-apache-2.0 #region-us \n" ]
[ 14 ]
[ "passage: TAGS\n#license-apache-2.0 #region-us \n" ]
2305343441b13a0b0e122f48351abef2247b60fa
# Dataset Card for "tweet-sentiment-ita-eng" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Ocelot02/tweet-sentiment-ita-eng
[ "region:us" ]
2023-08-18T07:23:26+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "label", "dtype": {"class_label": {"names": {"0": "negative", "1": "neutral", "2": "positive"}}}}, {"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 199492, "num_examples": 1839}, {"name": "validation", "num_bytes": 36403, "num_examples": 324}, {"name": "test", "num_bytes": 97401, "num_examples": 870}], "download_size": 203442, "dataset_size": 333296}}
2023-08-18T07:25:13+00:00
[]
[]
TAGS #region-us
# Dataset Card for "tweet-sentiment-ita-eng" More Information needed
[ "# Dataset Card for \"tweet-sentiment-ita-eng\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"tweet-sentiment-ita-eng\"\n\nMore Information needed" ]
[ 6, 18 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"tweet-sentiment-ita-eng\"\n\nMore Information needed" ]
56a866b5cf88b24c4f3e1c08bcf530ac2cfa1ada
# Dataset Card for "unstructured-data-multilingual" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
jingwora/unstructured-data-multilingual
[ "region:us" ]
2023-08-18T07:23:41+00:00
{"dataset_info": {"features": [{"name": "language", "dtype": "string"}, {"name": "id", "dtype": "string"}, {"name": "product_id", "dtype": "string"}, {"name": "category", "dtype": "string"}, {"name": "sub_category", "dtype": "string"}, {"name": "product_name", "dtype": "string"}, {"name": "product_detail", "dtype": "string"}, {"name": "image_files", "dtype": "string"}, {"name": "review", "dtype": "string"}, {"name": "star", "dtype": "string"}, {"name": "sentiment", "dtype": "string"}], "splits": [{"name": "en", "num_bytes": 11790, "num_examples": 24}, {"name": "ja", "num_bytes": 10499, "num_examples": 24}, {"name": "th", "num_bytes": 12716, "num_examples": 24}], "download_size": 34282, "dataset_size": 35005}, "configs": [{"config_name": "default", "data_files": [{"split": "en", "path": "data/en-*"}, {"split": "ja", "path": "data/ja-*"}, {"split": "th", "path": "data/th-*"}]}]}
2023-08-19T02:46:36+00:00
[]
[]
TAGS #region-us
# Dataset Card for "unstructured-data-multilingual" More Information needed
[ "# Dataset Card for \"unstructured-data-multilingual\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"unstructured-data-multilingual\"\n\nMore Information needed" ]
[ 6, 19 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"unstructured-data-multilingual\"\n\nMore Information needed" ]
a68e54c0887795bf9872fd70e3f1bc8fbc6ba172
# Dataset Card for Evaluation run of PocketDoc/Dans-PileOfSets-Mk1-llama-13b-merged

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/PocketDoc/Dans-PileOfSets-Mk1-llama-13b-merged
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [PocketDoc/Dans-PileOfSets-Mk1-llama-13b-merged](https://huggingface.co/PocketDoc/Dans-PileOfSets-Mk1-llama-13b-merged) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_PocketDoc__Dans-PileOfSets-Mk1-llama-13b-merged",
    "harness_winogrande_5",
    split="train",
)
```

## Latest results

These are the [latest results from run 2023-09-17T21:54:20.873992](https://huggingface.co/datasets/open-llm-leaderboard/details_PocketDoc__Dans-PileOfSets-Mk1-llama-13b-merged/blob/main/results_2023-09-17T21-54-20.873992.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.001572986577181208,
        "em_stderr": 0.0004058451132417743,
        "f1": 0.057128775167785376,
        "f1_stderr": 0.001288342539357545,
        "acc": 0.4232772429904026,
        "acc_stderr": 0.009826580746076554
    },
    "harness|drop|3": {
        "em": 0.001572986577181208,
        "em_stderr": 0.0004058451132417743,
        "f1": 0.057128775167785376,
        "f1_stderr": 0.001288342539357545
    },
    "harness|gsm8k|5": {
        "acc": 0.08491281273692192,
        "acc_stderr": 0.007678212824450797
    },
    "harness|winogrande|5": {
        "acc": 0.7616416732438832,
        "acc_stderr": 0.011974948667702311
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
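As a complement to the loading snippet in the card, a minimal sketch (an assumption, not part of the original card) of listing the per-task configurations and pulling the latest split of one of them:

```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_PocketDoc__Dans-PileOfSets-Mk1-llama-13b-merged"

configs = get_dataset_config_names(repo)      # one config per evaluated task
print(len(configs), configs[:5])

# each task config also exposes a "latest" split per the repo metadata
gsm8k = load_dataset(repo, "harness_gsm8k_5", split="latest")
print(gsm8k[0].keys())
```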
open-llm-leaderboard/details_PocketDoc__Dans-PileOfSets-Mk1-llama-13b-merged
[ "region:us" ]
2023-08-18T07:30:00+00:00
{"pretty_name": "Evaluation run of PocketDoc/Dans-PileOfSets-Mk1-llama-13b-merged", "dataset_summary": "Dataset automatically created during the evaluation run of model [PocketDoc/Dans-PileOfSets-Mk1-llama-13b-merged](https://huggingface.co/PocketDoc/Dans-PileOfSets-Mk1-llama-13b-merged) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_PocketDoc__Dans-PileOfSets-Mk1-llama-13b-merged\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-09-17T21:54:20.873992](https://huggingface.co/datasets/open-llm-leaderboard/details_PocketDoc__Dans-PileOfSets-Mk1-llama-13b-merged/blob/main/results_2023-09-17T21-54-20.873992.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.001572986577181208,\n \"em_stderr\": 0.0004058451132417743,\n \"f1\": 0.057128775167785376,\n \"f1_stderr\": 0.001288342539357545,\n \"acc\": 0.4232772429904026,\n \"acc_stderr\": 0.009826580746076554\n },\n \"harness|drop|3\": {\n \"em\": 0.001572986577181208,\n \"em_stderr\": 0.0004058451132417743,\n \"f1\": 0.057128775167785376,\n \"f1_stderr\": 0.001288342539357545\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.08491281273692192,\n \"acc_stderr\": 0.007678212824450797\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7616416732438832,\n \"acc_stderr\": 0.011974948667702311\n }\n}\n```", "repo_url": "https://huggingface.co/PocketDoc/Dans-PileOfSets-Mk1-llama-13b-merged", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|arc:challenge|25_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_17T21_54_20.873992", "path": ["**/details_harness|drop|3_2023-09-17T21-54-20.873992.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-17T21-54-20.873992.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_17T21_54_20.873992", "path": ["**/details_harness|gsm8k|5_2023-09-17T21-54-20.873992.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-09-17T21-54-20.873992.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hellaswag|10_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hellaswag|10_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-18T14:55:50.956867.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-18T14:55:50.956867.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-18T14:55:50.956867.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-18T14:55:50.956867.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": 
"2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-18T14:55:50.956867.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-18T14:55:50.956867.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_17T21_54_20.873992", "path": ["**/details_harness|winogrande|5_2023-09-17T21-54-20.873992.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-17T21-54-20.873992.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_18T14_55_50.956867", "path": ["results_2023-07-18T14:55:50.956867.parquet"]}, {"split": "2023_09_17T21_54_20.873992", "path": ["results_2023-09-17T21-54-20.873992.parquet"]}, {"split": "latest", "path": ["results_2023-09-17T21-54-20.873992.parquet"]}]}]}
2023-09-17T20:54:32+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of PocketDoc/Dans-PileOfSets-Mk1-llama-13b-merged ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model PocketDoc/Dans-PileOfSets-Mk1-llama-13b-merged on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-09-17T21:54:20.873992 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
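The summary above says "you can for instance do the following" without showing the snippet; a minimal sketch, assuming the details repository follows the usual `open-llm-leaderboard/details_<org>__<model>` naming (that exact repo id is not spelled out in this record) and reusing a config name and the "latest" split from the config list above, might look like:

```python
from datasets import load_dataset

# Assumed repository id -- the exact details repo name is not stated in this record.
repo_id = "open-llm-leaderboard/details_PocketDoc__Dans-PileOfSets-Mk1-llama-13b-merged"

# "harness_winogrande_5" and the "latest" split both appear in the config list above.
details = load_dataset(repo_id, "harness_winogrande_5", split="latest")
print(details[0])
```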
[ "# Dataset Card for Evaluation run of PocketDoc/Dans-PileOfSets-Mk1-llama-13b-merged", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model PocketDoc/Dans-PileOfSets-Mk1-llama-13b-merged on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-17T21:54:20.873992(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of PocketDoc/Dans-PileOfSets-Mk1-llama-13b-merged", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model PocketDoc/Dans-PileOfSets-Mk1-llama-13b-merged on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-17T21:54:20.873992(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 32, 31, 180, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of PocketDoc/Dans-PileOfSets-Mk1-llama-13b-merged## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model PocketDoc/Dans-PileOfSets-Mk1-llama-13b-merged on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-09-17T21:54:20.873992(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
e6733049470890e86342934b6d7e4f848ae524a0
# Dataset of horikawa_raiko/堀川雷鼓/호리카와라이코 (Touhou) This is the dataset of horikawa_raiko/堀川雷鼓/호리카와라이코 (Touhou), containing 45 images and their tags. The core tags of this character are `red_eyes, red_hair, short_hair`, which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)). ## List of Packages | Name | Images | Size | Download | Type | Description | |:-----------------|---------:|:----------|:-----------------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------| | raw | 45 | 50.24 MiB | [Download](https://huggingface.co/datasets/CyberHarem/horikawa_raiko_touhou/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). | | 800 | 45 | 36.76 MiB | [Download](https://huggingface.co/datasets/CyberHarem/horikawa_raiko_touhou/resolve/main/dataset-800.zip) | IMG+TXT | dataset with the shorter side not exceeding 800 pixels. | | stage3-p480-800 | 93 | 65.02 MiB | [Download](https://huggingface.co/datasets/CyberHarem/horikawa_raiko_touhou/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | | 1200 | 45 | 47.95 MiB | [Download](https://huggingface.co/datasets/CyberHarem/horikawa_raiko_touhou/resolve/main/dataset-1200.zip) | IMG+TXT | dataset with the shorter side not exceeding 1200 pixels. | | stage3-p480-1200 | 93 | 78.51 MiB | [Download](https://huggingface.co/datasets/CyberHarem/horikawa_raiko_touhou/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | ### Load Raw Dataset with Waifuc We provide raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need this, just run the following code ```python import os import zipfile from huggingface_hub import hf_hub_download from waifuc.source import LocalSource # download raw archive file zip_file = hf_hub_download( repo_id='CyberHarem/horikawa_raiko_touhou', repo_type='dataset', filename='dataset-raw.zip', ) # extract files to your directory dataset_dir = 'dataset_dir' os.makedirs(dataset_dir, exist_ok=True) with zipfile.ZipFile(zip_file, 'r') as zf: zf.extractall(dataset_dir) # load the dataset with waifuc source = LocalSource(dataset_dir) for item in source: print(item.image, item.meta['filename'], item.meta['tags']) ``` ## List of Clusters List of tag clustering result, maybe some outfits can be mined here. 
### Raw Text Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:------------------------------------------------------------------------------------------------------------------| | 0 | 45 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, solo, smile, drumsticks, jacket, purple_necktie, skirt, looking_at_viewer, plaid_shirt, mitsudomoe_(shape) | ### Table Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | solo | smile | drumsticks | jacket | purple_necktie | skirt | looking_at_viewer | plaid_shirt | mitsudomoe_(shape) | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:-------|:--------|:-------------|:---------|:-----------------|:--------|:--------------------|:--------------|:---------------------| | 0 | 45 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | X | X | X | X | X |
CyberHarem/horikawa_raiko_touhou
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-08-18T07:32:23+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2024-01-14T22:27:16+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of horikawa\_raiko/堀川雷鼓/호리카와라이코 (Touhou) ================================================ This is the dataset of horikawa\_raiko/堀川雷鼓/호리카와라이코 (Touhou), containing 45 images and their tags. The core tags of this character are 'red\_eyes, red\_hair, short\_hair', which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by DeepGHS Team(huggingface organization). List of Packages ---------------- ### Load Raw Dataset with Waifuc We provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code List of Clusters ---------------- List of tag clustering result, maybe some outfits can be mined here. ### Raw Text Version ### Table Version
[ "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n", "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ 44, 61, 5, 4 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.### Raw Text Version### Table Version" ]
efb888e277f6683725fb0a2754e87d6c00d800ba
# GQA-Inpaint Dataset GQA-Inpaint is a real image dataset to train and evaluate models for the instructional image inpainting task. Scene graphs of the [GQA dataset](https://cs.stanford.edu/people/dorarad/gqa/about.html) are exploited to generate paired training data by utilizing state-of-the-art instance segmentation and inpainting methods. Dataset usage and content details are explained in the [Inst-Inpaint GitHub repository](https://github.com/abyildirim/inst-inpaint).
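Since the card defers usage details to the Inst-Inpaint repository, the snippet below is only a hedged sketch of fetching the raw files from the Hub with `huggingface_hub`; it assumes nothing about the file layout or the official loading pipeline.

```python
from huggingface_hub import snapshot_download

# Download the dataset repository locally; consult the Inst-Inpaint GitHub
# repository for how the files are organized and consumed during training.
local_dir = snapshot_download(repo_id="abyildirim/gqa-inpaint", repo_type="dataset")
print(local_dir)
```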
abyildirim/gqa-inpaint
[ "license:cc-by-4.0", "region:us" ]
2023-08-18T07:39:11+00:00
{"license": "cc-by-4.0"}
2023-09-03T15:24:14+00:00
[]
[]
TAGS #license-cc-by-4.0 #region-us
# GQA-Inpaint Dataset GQA-Inpaint is a real image dataset to train and evaluate models for the instructional image inpainting task. Scene graphs of the GQA dataset are exploited to generate paired training data by utilizing state-of-the-art instance segmentation and inpainting methods. Dataset usage and content details are explained in the Inst-Inpaint GitHub repository.
[ "# GQA-Inpaint Dataset\n\nGQA-Inpaint is a real image dataset to train and evaluate models for the instructional image inpainting task. Scene graphs of the GQA dataset are exploited to generate paired training data by utilizing state-of-the-art instance segmentation and inpainting methods. Dataset usage and content details are explained in the Inst-Inpaint GitHub repository." ]
[ "TAGS\n#license-cc-by-4.0 #region-us \n", "# GQA-Inpaint Dataset\n\nGQA-Inpaint is a real image dataset to train and evaluate models for the instructional image inpainting task. Scene graphs of the GQA dataset are exploited to generate paired training data by utilizing state-of-the-art instance segmentation and inpainting methods. Dataset usage and content details are explained in the Inst-Inpaint GitHub repository." ]
[ 15, 99 ]
[ "passage: TAGS\n#license-cc-by-4.0 #region-us \n# GQA-Inpaint Dataset\n\nGQA-Inpaint is a real image dataset to train and evaluate models for the instructional image inpainting task. Scene graphs of the GQA dataset are exploited to generate paired training data by utilizing state-of-the-art instance segmentation and inpainting methods. Dataset usage and content details are explained in the Inst-Inpaint GitHub repository." ]
47b89cd02d6f8bf5c3e3ddf82833b407faf7e7e7
# Dataset of yagokoro_eirin/八意永琳/야고코로에이린 (Touhou) This is the dataset of yagokoro_eirin/八意永琳/야고코로에이린 (Touhou), containing 500 images and their tags. The core tags of this character are `long_hair, hat, nurse_cap, braid, grey_hair, breasts, very_long_hair, blue_eyes, single_braid, white_hair, large_breasts`, which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)). ## List of Packages | Name | Images | Size | Download | Type | Description | |:-----------------|---------:|:-----------|:-----------------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------| | raw | 500 | 580.86 MiB | [Download](https://huggingface.co/datasets/CyberHarem/yagokoro_eirin_touhou/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). | | 800 | 500 | 362.30 MiB | [Download](https://huggingface.co/datasets/CyberHarem/yagokoro_eirin_touhou/resolve/main/dataset-800.zip) | IMG+TXT | dataset with the shorter side not exceeding 800 pixels. | | stage3-p480-800 | 1058 | 694.64 MiB | [Download](https://huggingface.co/datasets/CyberHarem/yagokoro_eirin_touhou/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | | 1200 | 500 | 524.00 MiB | [Download](https://huggingface.co/datasets/CyberHarem/yagokoro_eirin_touhou/resolve/main/dataset-1200.zip) | IMG+TXT | dataset with the shorter side not exceeding 1200 pixels. | | stage3-p480-1200 | 1058 | 936.52 MiB | [Download](https://huggingface.co/datasets/CyberHarem/yagokoro_eirin_touhou/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | ### Load Raw Dataset with Waifuc We provide raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need this, just run the following code ```python import os import zipfile from huggingface_hub import hf_hub_download from waifuc.source import LocalSource # download raw archive file zip_file = hf_hub_download( repo_id='CyberHarem/yagokoro_eirin_touhou', repo_type='dataset', filename='dataset-raw.zip', ) # extract files to your directory dataset_dir = 'dataset_dir' os.makedirs(dataset_dir, exist_ok=True) with zipfile.ZipFile(zip_file, 'r') as zf: zf.extractall(dataset_dir) # load the dataset with waifuc source = LocalSource(dataset_dir) for item in source: print(item.image, item.meta['filename'], item.meta['tags']) ``` ## List of Clusters List of tag clustering result, maybe some outfits can be mined here. 
### Raw Text Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | 0 | 10 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, arrow_(projectile), bow_(weapon), solo, smile | | 1 | 8 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, bow_(weapon), full_moon, solo, arrow_(projectile) | | 2 | 10 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | 1girl, smile, solo | | 3 | 14 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | 1girl, bangs, multicolored_dress, puffy_short_sleeves, red_cross, solo, holding_bow_(weapon), looking_at_viewer, blue_dress, constellation_print, closed_mouth, holding_arrow, smile, blue_headwear, trigram, frilled_dress, black_footwear, full_body, standing, two-tone_dress, blush, simple_background, white_background, yellow_eyes | | 4 | 11 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | 1girl, closed_mouth, looking_at_viewer, simple_background, solo, blue_headwear, multicolored_dress, upper_body, white_background, blue_dress, puffy_short_sleeves, red_cross, smile, parted_bangs, blush, constellation_print, holding | | 5 | 6 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | 1girl, blush, nipples, nude, solo, purple_eyes, blue_hair, pussy, spread_legs, sweat | | 6 | 8 | ![](samples/6/clu6-sample0.png) | ![](samples/6/clu6-sample1.png) | ![](samples/6/clu6-sample2.png) | ![](samples/6/clu6-sample3.png) | ![](samples/6/clu6-sample4.png) | 1boy, 1girl, hetero, solo_focus, blush, huge_breasts, nipples, penis, paizuri, nude, sweat, blue_hair, cum_on_breasts, purple_eyes, ejaculation, fellatio, tongue_out | | 7 | 15 | ![](samples/7/clu7-sample0.png) | ![](samples/7/clu7-sample1.png) | ![](samples/7/clu7-sample2.png) | ![](samples/7/clu7-sample3.png) | ![](samples/7/clu7-sample4.png) | 2girls, smile, black_hair, closed_eyes, blush | | 8 | 5 | ![](samples/8/clu8-sample0.png) | ![](samples/8/clu8-sample1.png) | ![](samples/8/clu8-sample2.png) | ![](samples/8/clu8-sample3.png) | ![](samples/8/clu8-sample4.png) | 1girl, blush, nipples, solo, looking_at_viewer, on_back, barefoot, bed_sheet, no_bra, off_shoulder, on_side, pillow, smile, ass, bare_legs, bare_shoulders, black_panties, dakimakura_(medium), hair_bow, navel, on_bed, open_dress, sweat, thighs, torn_clothes, white_panties | ### Table Version | # | Samples | Img-1 | 
Img-2 | Img-3 | Img-4 | Img-5 | 1girl | arrow_(projectile) | bow_(weapon) | solo | smile | full_moon | bangs | multicolored_dress | puffy_short_sleeves | red_cross | holding_bow_(weapon) | looking_at_viewer | blue_dress | constellation_print | closed_mouth | holding_arrow | blue_headwear | trigram | frilled_dress | black_footwear | full_body | standing | two-tone_dress | blush | simple_background | white_background | yellow_eyes | upper_body | parted_bangs | holding | nipples | nude | purple_eyes | blue_hair | pussy | spread_legs | sweat | 1boy | hetero | solo_focus | huge_breasts | penis | paizuri | cum_on_breasts | ejaculation | fellatio | tongue_out | 2girls | black_hair | closed_eyes | on_back | barefoot | bed_sheet | no_bra | off_shoulder | on_side | pillow | ass | bare_legs | bare_shoulders | black_panties | dakimakura_(medium) | hair_bow | navel | on_bed | open_dress | thighs | torn_clothes | white_panties | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:---------------------|:---------------|:-------|:--------|:------------|:--------|:---------------------|:----------------------|:------------|:-----------------------|:--------------------|:-------------|:----------------------|:---------------|:----------------|:----------------|:----------|:----------------|:-----------------|:------------|:-----------|:-----------------|:--------|:--------------------|:-------------------|:--------------|:-------------|:---------------|:----------|:----------|:-------|:--------------|:------------|:--------|:--------------|:--------|:-------|:---------|:-------------|:---------------|:--------|:----------|:-----------------|:--------------|:-----------|:-------------|:---------|:-------------|:--------------|:----------|:-----------|:------------|:---------|:---------------|:----------|:---------|:------|:------------|:-----------------|:----------------|:----------------------|:-----------|:--------|:---------|:-------------|:---------|:---------------|:----------------| | 0 | 10 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 1 | 8 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | X | X | X | | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 2 | 10 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | X | | | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 3 | 14 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | X | | | X | X | | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 4 | 11 | ![](samples/4/clu4-sample0.png) | 
![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | X | | | X | X | | | X | X | X | | X | X | X | X | | X | | | | | | | X | X | X | | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 5 | 6 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | X | | | X | | | | | | | | | | | | | | | | | | | | X | | | | | | | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 6 | 8 | ![](samples/6/clu6-sample0.png) | ![](samples/6/clu6-sample1.png) | ![](samples/6/clu6-sample2.png) | ![](samples/6/clu6-sample3.png) | ![](samples/6/clu6-sample4.png) | X | | | | | | | | | | | | | | | | | | | | | | | X | | | | | | | X | X | X | X | | | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | 7 | 15 | ![](samples/7/clu7-sample0.png) | ![](samples/7/clu7-sample1.png) | ![](samples/7/clu7-sample2.png) | ![](samples/7/clu7-sample3.png) | ![](samples/7/clu7-sample4.png) | | | | | X | | | | | | | | | | | | | | | | | | | X | | | | | | | | | | | | | | | | | | | | | | | | X | X | X | | | | | | | | | | | | | | | | | | | | | 8 | 5 | ![](samples/8/clu8-sample0.png) | ![](samples/8/clu8-sample1.png) | ![](samples/8/clu8-sample2.png) | ![](samples/8/clu8-sample3.png) | ![](samples/8/clu8-sample4.png) | X | | | X | X | | | | | | | X | | | | | | | | | | | | X | | | | | | | X | | | | | | X | | | | | | | | | | | | | | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X |
CyberHarem/yagokoro_eirin_touhou
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-08-18T07:52:18+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2024-01-14T16:45:18+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of yagokoro\_eirin/八意永琳/야고코로에이린 (Touhou) ================================================ This is the dataset of yagokoro\_eirin/八意永琳/야고코로에이린 (Touhou), containing 500 images and their tags. The core tags of this character are 'long\_hair, hat, nurse\_cap, braid, grey\_hair, breasts, very\_long\_hair, blue\_eyes, single\_braid, white\_hair, large\_breasts', which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by DeepGHS Team(huggingface organization). List of Packages ---------------- ### Load Raw Dataset with Waifuc We provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code List of Clusters ---------------- List of tag clustering result, maybe some outfits can be mined here. ### Raw Text Version ### Table Version
[ "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n", "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ 44, 61, 5, 4 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.### Raw Text Version### Table Version" ]
39e2837bd57c09af5ca6f80a6483d40d904ad1ca
- VNDS: A Vietnamese Dataset for Summarization - https://ieeexplore.ieee.org/document/9023886/ - https://github.com/ThanhChinhBK/vietnews
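A minimal loading sketch, assuming the default parquet config and the `guid`/`title`/`abstract`/`article` fields declared in this record's dataset_info, might look like:

```python
from datasets import load_dataset

# The record's dataset_info declares train/validation/test splits.
ds = load_dataset("nam194/vietnews")

example = ds["train"][0]
print(example["title"])
print(example["abstract"][:200])   # reference summary
print(example["article"][:200])    # source article
```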
nam194/vietnews
[ "region:us" ]
2023-08-18T07:55:17+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "guid", "dtype": "int64"}, {"name": "title", "dtype": "string"}, {"name": "abstract", "dtype": "string"}, {"name": "article", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 325418455, "num_examples": 99134}, {"name": "validation", "num_bytes": 73397317, "num_examples": 22184}, {"name": "test", "num_bytes": 74536959, "num_examples": 22498}], "download_size": 246524136, "dataset_size": 473352731}}
2023-08-18T08:01:24+00:00
[]
[]
TAGS #region-us
- VNDS: A Vietnamese Dataset for Summarization - URL - URL
[]
[ "TAGS\n#region-us \n" ]
[ 6 ]
[ "passage: TAGS\n#region-us \n" ]
36cbdd56686f5f53fb4f65389f2c05d7a30f2a01
# Dataset Card for "manipulations_multi" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
guyhadad01/manipulations_multi
[ "region:us" ]
2023-08-18T08:13:37+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "text", "dtype": "string"}, {"name": "labels", "sequence": "int64"}], "splits": [{"name": "train", "num_bytes": 44449, "num_examples": 263}, {"name": "test", "num_bytes": 11084, "num_examples": 66}], "download_size": 22617, "dataset_size": 55533}}
2023-08-18T08:46:32+00:00
[]
[]
TAGS #region-us
# Dataset Card for "manipulations_multi" More Information needed
[ "# Dataset Card for \"manipulations_multi\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"manipulations_multi\"\n\nMore Information needed" ]
[ 6, 15 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"manipulations_multi\"\n\nMore Information needed" ]
f56d349c4c5a0e76fb10831af08adad2b2939091
# Dataset of shinki/神綺/신키 (Touhou) This is the dataset of shinki/神綺/신키 (Touhou), containing 500 images and their tags. The core tags of this character are `long_hair, one_side_up, hair_ornament, wings, blue_eyes, white_hair`, which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)). ## List of Packages | Name | Images | Size | Download | Type | Description | |:-----------------|---------:|:-----------|:---------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------| | raw | 500 | 471.06 MiB | [Download](https://huggingface.co/datasets/CyberHarem/shinki_touhou/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). | | 800 | 500 | 327.52 MiB | [Download](https://huggingface.co/datasets/CyberHarem/shinki_touhou/resolve/main/dataset-800.zip) | IMG+TXT | dataset with the shorter side not exceeding 800 pixels. | | stage3-p480-800 | 964 | 589.28 MiB | [Download](https://huggingface.co/datasets/CyberHarem/shinki_touhou/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | | 1200 | 500 | 438.49 MiB | [Download](https://huggingface.co/datasets/CyberHarem/shinki_touhou/resolve/main/dataset-1200.zip) | IMG+TXT | dataset with the shorter side not exceeding 1200 pixels. | | stage3-p480-1200 | 964 | 742.03 MiB | [Download](https://huggingface.co/datasets/CyberHarem/shinki_touhou/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | ### Load Raw Dataset with Waifuc We provide raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need this, just run the following code ```python import os import zipfile from huggingface_hub import hf_hub_download from waifuc.source import LocalSource # download raw archive file zip_file = hf_hub_download( repo_id='CyberHarem/shinki_touhou', repo_type='dataset', filename='dataset-raw.zip', ) # extract files to your directory dataset_dir = 'dataset_dir' os.makedirs(dataset_dir, exist_ok=True) with zipfile.ZipFile(zip_file, 'r') as zf: zf.extractall(dataset_dir) # load the dataset with waifuc source = LocalSource(dataset_dir) for item in source: print(item.image, item.meta['filename'], item.meta['tags']) ``` ## List of Clusters List of tag clustering result, maybe some outfits can be mined here. 
### Raw Text Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | 0 | 7 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, dress, hair_bobbles, smile, solo, capelet | | 1 | 10 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, dress, hair_bobbles, red_capelet, solo, red_eyes, smile, multiple_wings | | 2 | 5 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | 1girl, hair_bobbles, looking_at_viewer, red_capelet, simple_background, smile, solo, white_background, long_sleeves, multiple_wings, very_long_hair, red_dress, grey_hair, purple_eyes, wide_sleeves | | 3 | 13 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | 1girl, bangs, hair_bobbles, looking_at_viewer, red_capelet, red_dress, solo, closed_mouth, smile, long_sleeves, wide_sleeves, grey_eyes, grey_hair, blush, simple_background, white_background, ribbon, multiple_wings, upper_body, very_long_hair | | 4 | 5 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | 1girl, bare_shoulders, collarbone, grey_hair, hair_bobbles, large_breasts, lips, long_sleeves, looking_at_viewer, off_shoulder, red_dress, simple_background, smile, solo, white_background, closed_mouth, grey_eyes, very_long_hair, upper_body, cleavage, cowboy_shot, criss-cross_halter, navel, one-hour_drawing_challenge, parted_bangs, red_capelet, stomach, turtleneck, underboob, white_bikini | ### Table Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | dress | hair_bobbles | smile | solo | capelet | red_capelet | red_eyes | multiple_wings | looking_at_viewer | simple_background | white_background | long_sleeves | very_long_hair | red_dress | grey_hair | purple_eyes | wide_sleeves | bangs | closed_mouth | grey_eyes | blush | ribbon | upper_body | bare_shoulders | collarbone | large_breasts | lips | off_shoulder | cleavage | cowboy_shot | criss-cross_halter | navel | one-hour_drawing_challenge | parted_bangs | stomach | turtleneck | underboob | white_bikini | 
|----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:--------|:---------------|:--------|:-------|:----------|:--------------|:-----------|:-----------------|:--------------------|:--------------------|:-------------------|:---------------|:-----------------|:------------|:------------|:--------------|:---------------|:--------|:---------------|:------------|:--------|:---------|:-------------|:-----------------|:-------------|:----------------|:-------|:---------------|:-----------|:--------------|:---------------------|:--------|:-----------------------------|:---------------|:----------|:-------------|:------------|:---------------| | 0 | 7 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 1 | 10 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | X | X | X | X | | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 2 | 5 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | X | | X | X | X | | X | | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | 3 | 13 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | X | | X | X | X | | X | | X | X | X | X | X | X | X | X | | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | 4 | 5 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | X | | X | X | X | | X | | | X | X | X | X | X | X | X | | | | X | X | | | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X |
CyberHarem/shinki_touhou
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-08-18T08:33:52+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2024-01-14T23:19:47+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of shinki/神綺/신키 (Touhou) ================================ This is the dataset of shinki/神綺/신키 (Touhou), containing 500 images and their tags. The core tags of this character are 'long\_hair, one\_side\_up, hair\_ornament, wings, blue\_eyes, white\_hair', which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by DeepGHS Team(huggingface organization). List of Packages ---------------- ### Load Raw Dataset with Waifuc We provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code List of Clusters ---------------- List of tag clustering result, maybe some outfits can be mined here. ### Raw Text Version ### Table Version
[ "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n", "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ 44, 61, 5, 4 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.### Raw Text Version### Table Version" ]
564a0597c9da44e0a05353c185268fecb8363a2b
# Dataset of murasa_minamitsu/村紗水蜜/무라사미나미츠 (Touhou) This is the dataset of murasa_minamitsu/村紗水蜜/무라사미나미츠 (Touhou), containing 500 images and their tags. The core tags of this character are `short_hair, hat, black_hair, sailor_hat, green_eyes`, which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)). ## List of Packages | Name | Images | Size | Download | Type | Description | |:-----------------|---------:|:-----------|:-------------------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------| | raw | 500 | 569.90 MiB | [Download](https://huggingface.co/datasets/CyberHarem/murasa_minamitsu_touhou/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). | | 800 | 500 | 365.39 MiB | [Download](https://huggingface.co/datasets/CyberHarem/murasa_minamitsu_touhou/resolve/main/dataset-800.zip) | IMG+TXT | dataset with the shorter side not exceeding 800 pixels. | | stage3-p480-800 | 1104 | 701.54 MiB | [Download](https://huggingface.co/datasets/CyberHarem/murasa_minamitsu_touhou/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | | 1200 | 500 | 522.44 MiB | [Download](https://huggingface.co/datasets/CyberHarem/murasa_minamitsu_touhou/resolve/main/dataset-1200.zip) | IMG+TXT | dataset with the shorter side not exceeding 1200 pixels. | | stage3-p480-1200 | 1104 | 916.45 MiB | [Download](https://huggingface.co/datasets/CyberHarem/murasa_minamitsu_touhou/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | ### Load Raw Dataset with Waifuc We provide raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need this, just run the following code ```python import os import zipfile from huggingface_hub import hf_hub_download from waifuc.source import LocalSource # download raw archive file zip_file = hf_hub_download( repo_id='CyberHarem/murasa_minamitsu_touhou', repo_type='dataset', filename='dataset-raw.zip', ) # extract files to your directory dataset_dir = 'dataset_dir' os.makedirs(dataset_dir, exist_ok=True) with zipfile.ZipFile(zip_file, 'r') as zf: zf.extractall(dataset_dir) # load the dataset with waifuc source = LocalSource(dataset_dir) for item in source: print(item.image, item.meta['filename'], item.meta['tags']) ``` ## List of Clusters List of tag clustering result, maybe some outfits can be mined here. 
### Raw Text Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | 0 | 18 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, anchor, solo, hishaku, sailor_collar, smile | | 1 | 7 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, anchor, blue_eyes, sailor, shorts, solo, smile | | 2 | 5 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | 1girl, anchor, belt, sailor, shorts, solo, smile, closed_eyes | | 3 | 10 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | 1girl, solo, white_shorts, anchor, navel, sailor_collar, midriff, smile, blue_eyes, hishaku, open_mouth, belt, chain | | 4 | 8 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | 1girl, looking_at_viewer, red_neckerchief, short_sleeves, solo, white_headwear, white_shirt, white_shorts, bangs, holding, sailor_shirt, anchor_symbol, cowboy_shot, hishaku, closed_mouth, hair_between_eyes, simple_background, smile, green_sailor_collar, water, white_background | | 5 | 11 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | 1girl, anchor_symbol, black_footwear, looking_at_viewer, red_neckerchief, sailor_shirt, smile, solo, white_headwear, white_shirt, white_shorts, bangs, boots, green_sailor_collar, white_socks, full_body, hishaku, holding, midriff, navel, puffy_short_sleeves, water, white_background, closed_mouth, open_mouth, kneehighs, ladle, stomach | | 6 | 10 | ![](samples/6/clu6-sample0.png) | ![](samples/6/clu6-sample1.png) | ![](samples/6/clu6-sample2.png) | ![](samples/6/clu6-sample3.png) | ![](samples/6/clu6-sample4.png) | 1girl, anchor_symbol, bangs, red_neckerchief, sailor_shirt, short_sleeves, smile, solo, white_headwear, white_shirt, white_shorts, looking_at_viewer, white_gloves, jacket, black_coat, closed_mouth, white_background, green_sailor_collar, simple_background, black_footwear, cowboy_shot, midriff, black_belt, black_socks, full_body, kneehighs, navel | ### Table Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | anchor | solo | hishaku | sailor_collar | smile | blue_eyes | sailor | shorts | belt | closed_eyes | white_shorts | navel | midriff | open_mouth | chain | looking_at_viewer | red_neckerchief | short_sleeves | white_headwear | white_shirt | bangs | holding | sailor_shirt | anchor_symbol | cowboy_shot | closed_mouth | 
hair_between_eyes | simple_background | green_sailor_collar | water | white_background | black_footwear | boots | white_socks | full_body | puffy_short_sleeves | kneehighs | ladle | stomach | white_gloves | jacket | black_coat | black_belt | black_socks | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:---------|:-------|:----------|:----------------|:--------|:------------|:---------|:---------|:-------|:--------------|:---------------|:--------|:----------|:-------------|:--------|:--------------------|:------------------|:----------------|:-----------------|:--------------|:--------|:----------|:---------------|:----------------|:--------------|:---------------|:--------------------|:--------------------|:----------------------|:--------|:-------------------|:-----------------|:--------|:--------------|:------------|:----------------------|:------------|:--------|:----------|:---------------|:---------|:-------------|:-------------|:--------------| | 0 | 18 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 1 | 7 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | X | X | | | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 2 | 5 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | X | X | X | | | X | | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 3 | 10 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | X | X | X | X | X | X | X | | | X | | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 4 | 8 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | X | | X | X | | X | | | | | | X | | | | | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | 5 | 11 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | X | | X | X | | X | | | | | | X | X | X | X | | X | X | | X | X | X | X | X | X | | X | | | X | X | X | X | X | X | X | X | X | X | X | | | | | | | 6 | 10 | ![](samples/6/clu6-sample0.png) | ![](samples/6/clu6-sample1.png) | ![](samples/6/clu6-sample2.png) | ![](samples/6/clu6-sample3.png) | ![](samples/6/clu6-sample4.png) | X | | X | | | X | | | | | | X | X | X | | | X | X | X | X | X | X | | X | X | X | X | | X | X | | X | X | | | X | | X | | | X | X | X | X | X |
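The packaged IMG+TXT variants listed in the package table are plain zip archives in the same repository, so they can be fetched the same way as the raw package. Below is a minimal sketch for the 800px package; the filename `dataset-800.zip` comes from the download link in the table above, while the target directory name is an arbitrary choice.

```python
import os
import zipfile

from huggingface_hub import hf_hub_download

# Fetch the 800px IMG+TXT package (filename taken from the package table above).
zip_file = hf_hub_download(
    repo_id='CyberHarem/murasa_minamitsu_touhou',
    repo_type='dataset',
    filename='dataset-800.zip',
)

# Extract the image/caption pairs to a local directory of your choice.
dataset_dir = 'dataset_800'
os.makedirs(dataset_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
    zf.extractall(dataset_dir)
```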
CyberHarem/murasa_minamitsu_touhou
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-08-18T08:34:18+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2024-01-14T17:20:24+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of murasa\_minamitsu/村紗水蜜/무라사미나미츠 (Touhou) ================================================== This is the dataset of murasa\_minamitsu/村紗水蜜/무라사미나미츠 (Touhou), containing 500 images and their tags. The core tags of this character are 'short\_hair, hat, black\_hair, sailor\_hat, green\_eyes', which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by DeepGHS Team(huggingface organization). List of Packages ---------------- ### Load Raw Dataset with Waifuc We provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code List of Clusters ---------------- List of tag clustering result, maybe some outfits can be mined here. ### Raw Text Version ### Table Version
[ "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n", "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ 44, 61, 5, 4 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.### Raw Text Version### Table Version" ]
41d6e846fdd8287875f62dff022417ad954b9a0f
# Dataset Card for "docqa_train" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
dhiruHF/docqa_train
[ "region:us" ]
2023-08-18T08:42:14+00:00
{"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 6148607, "num_examples": 1500}], "download_size": 3620848, "dataset_size": 6148607}}
2023-08-18T08:42:24+00:00
[]
[]
TAGS #region-us
# Dataset Card for "docqa_train" More Information needed
[ "# Dataset Card for \"docqa_train\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"docqa_train\"\n\nMore Information needed" ]
[ 6, 15 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"docqa_train\"\n\nMore Information needed" ]
bc28bdc70583f90b782854c9ffb1b5d1af1ca172
# Dataset of tsukumo_benben/九十九弁々 (Touhou) This is the dataset of tsukumo_benben/九十九弁々 (Touhou), containing 84 images and their tags. The core tags of this character are `long_hair, purple_hair, hair_flower, hair_ornament, twintails, purple_eyes, very_long_hair`, which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)). ## List of Packages | Name | Images | Size | Download | Type | Description | |:-----------------|---------:|:-----------|:-----------------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------| | raw | 84 | 114.79 MiB | [Download](https://huggingface.co/datasets/CyberHarem/tsukumo_benben_touhou/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). | | 800 | 84 | 74.46 MiB | [Download](https://huggingface.co/datasets/CyberHarem/tsukumo_benben_touhou/resolve/main/dataset-800.zip) | IMG+TXT | dataset with the shorter side not exceeding 800 pixels. | | stage3-p480-800 | 174 | 125.53 MiB | [Download](https://huggingface.co/datasets/CyberHarem/tsukumo_benben_touhou/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | | 1200 | 84 | 104.78 MiB | [Download](https://huggingface.co/datasets/CyberHarem/tsukumo_benben_touhou/resolve/main/dataset-1200.zip) | IMG+TXT | dataset with the shorter side not exceeding 1200 pixels. | | stage3-p480-1200 | 174 | 162.09 MiB | [Download](https://huggingface.co/datasets/CyberHarem/tsukumo_benben_touhou/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | ### Load Raw Dataset with Waifuc We provide raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need this, just run the following code ```python import os import zipfile from huggingface_hub import hf_hub_download from waifuc.source import LocalSource # download raw archive file zip_file = hf_hub_download( repo_id='CyberHarem/tsukumo_benben_touhou', repo_type='dataset', filename='dataset-raw.zip', ) # extract files to your directory dataset_dir = 'dataset_dir' os.makedirs(dataset_dir, exist_ok=True) with zipfile.ZipFile(zip_file, 'r') as zf: zf.extractall(dataset_dir) # load the dataset with waifuc source = LocalSource(dataset_dir) for item in source: print(item.image, item.meta['filename'], item.meta['tags']) ``` ## List of Clusters List of tag clustering result, maybe some outfits can be mined here. 
### Raw Text Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------------------------------------------------------------------------------------------------------| | 0 | 13 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, biwa_lute, dress, flower, solo, chain, long_sleeves, musical_note, smile, looking_at_viewer, barefoot, open_mouth | | 1 | 8 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, biwa_lute, dress, flower, smile, solo, long_sleeves, playing_instrument, closed_eyes, chain, quarter_note, staff_(music) | ### Table Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | biwa_lute | dress | flower | solo | chain | long_sleeves | musical_note | smile | looking_at_viewer | barefoot | open_mouth | playing_instrument | closed_eyes | quarter_note | staff_(music) | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:------------|:--------|:---------|:-------|:--------|:---------------|:---------------|:--------|:--------------------|:-----------|:-------------|:---------------------|:--------------|:---------------|:----------------| | 0 | 13 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | 1 | 8 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | X | X | X | X | X | X | | X | | | | X | X | X | X |
CyberHarem/tsukumo_benben_touhou
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-08-18T08:55:42+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2024-01-14T23:37:20+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of tsukumo\_benben/九十九弁々 (Touhou) ========================================= This is the dataset of tsukumo\_benben/九十九弁々 (Touhou), containing 84 images and their tags. The core tags of this character are 'long\_hair, purple\_hair, hair\_flower, hair\_ornament, twintails, purple\_eyes, very\_long\_hair', which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by DeepGHS Team(huggingface organization). List of Packages ---------------- ### Load Raw Dataset with Waifuc We provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code List of Clusters ---------------- List of tag clustering result, maybe some outfits can be mined here. ### Raw Text Version ### Table Version
[ "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n", "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ 44, 61, 5, 4 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.### Raw Text Version### Table Version" ]
4dc23ce834d829ff72b371204fc2c23502c49a75
# Dataset Card for "MNIST-preprocessed" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
SneakyInsect/MNIST-preprocessed
[ "region:us" ]
2023-08-18T09:01:52+00:00
{"dataset_info": {"features": [{"name": "image", "sequence": {"sequence": "uint8"}}, {"name": "label", "dtype": "int64"}, {"name": "embedding", "sequence": "float32"}], "splits": [{"name": "train", "num_bytes": 62400000, "num_examples": 60000}, {"name": "test", "num_bytes": 10400000, "num_examples": 10000}], "download_size": 29807470, "dataset_size": 72800000}}
2023-08-21T05:50:58+00:00
[]
[]
TAGS #region-us
# Dataset Card for "MNIST-preprocessed" More Information needed
[ "# Dataset Card for \"MNIST-preprocessed\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"MNIST-preprocessed\"\n\nMore Information needed" ]
[ 6, 16 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"MNIST-preprocessed\"\n\nMore Information needed" ]
82fdd2b7f454e62715e0d7d940cafcad081a66fb
--- license: bsd ---

## hh-rlhf Chinese translation

Based on the helpful and harmless data open-sourced alongside the Anthropic paper "Training a Helpful and Harmless Assistant with Reinforcement Learning from Human Feedback", translated using machine-translation tools.

hh_rlhf_train.jsonl: merged Chinese and English training data, about 170,000 entries after cleaning
hh_rlhf_test.jsonl: merged Chinese and English test data, about 9,000 entries after cleaning
harmless_base_cn_train.jsonl: 42,394 entries
harmless_base_cn_test.jsonl: 2,304 entries
helpful_base_cn_train.jsonl: 43,722 entries
helpful_base_cn_test.jsonl: 2,346 entries

## Experiment report

Related RLHF experiment report: https://zhuanlan.zhihu.com/p/652044120
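As a rough sketch of how the files listed above could be loaded, the snippet below pulls two of the JSONL files directly from this repository with the `datasets` JSON loader; the file names come from the card, but the per-record schema is not documented here, so the access at the end is left generic.

```python
from datasets import load_dataset

# Load the merged Chinese/English train and test JSONL files from this repo.
# File names are taken from the card; the record schema is not documented here.
data = load_dataset(
    "dikw/hh_rlhf_cn",
    data_files={
        "train": "hh_rlhf_train.jsonl",
        "test": "hh_rlhf_test.jsonl",
    },
)

print(data)
print(data["train"][0])
```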
dikw/hh_rlhf_cn
[ "license:llama2", "region:us" ]
2023-08-18T09:09:26+00:00
{"license": "llama2"}
2023-08-24T04:51:47+00:00
[]
[]
TAGS #license-llama2 #region-us
--- license: bsd ---

## hh-rlhf Chinese translation

Based on the helpful and harmless data open-sourced alongside the Anthropic paper "Training a Helpful and Harmless Assistant with Reinforcement Learning from Human Feedback", translated using machine-translation tools.

hh_rlhf_train.jsonl: merged Chinese and English training data, about 170,000 entries after cleaning
hh_rlhf_test.jsonl: merged Chinese and English test data, about 9,000 entries after cleaning
harmless_base_cn_train.jsonl: 42,394 entries
harmless_base_cn_test.jsonl: 2,304 entries
helpful_base_cn_train.jsonl: 43,722 entries
helpful_base_cn_test.jsonl: 2,346 entries

## Experiment report

Related RLHF experiment report: URL
[ "## hh-rlhf中文翻译版本\n基于Anthropic论文Training a Helpful and Harmless Assistant with Reinforcement Learning from Human Feedback 开源的helpful 和harmless数据,使用翻译工具进行了翻译。 \nhh_rlhf_train.jsonl 合并中英文训练集数据 清洗过后17万条 \nhh_rlhf_test.jsonl 合并中英文测试集数据 清洗过后9千条 \nharmless_base_cn_train.jsonl 42394条 \nharmless_base_cn_test.jsonl 2304条 \nhelpful_base_cn_train.jsonl 43722条 \nhelpful_base_cn_test.jsonl 2346条", "## 实验报告\n相关rlhf实验报告:URL" ]
[ "TAGS\n#license-llama2 #region-us \n", "## hh-rlhf中文翻译版本\n基于Anthropic论文Training a Helpful and Harmless Assistant with Reinforcement Learning from Human Feedback 开源的helpful 和harmless数据,使用翻译工具进行了翻译。 \nhh_rlhf_train.jsonl 合并中英文训练集数据 清洗过后17万条 \nhh_rlhf_test.jsonl 合并中英文测试集数据 清洗过后9千条 \nharmless_base_cn_train.jsonl 42394条 \nharmless_base_cn_test.jsonl 2304条 \nhelpful_base_cn_train.jsonl 43722条 \nhelpful_base_cn_test.jsonl 2346条", "## 实验报告\n相关rlhf实验报告:URL" ]
[ 13, 170, 13 ]
[ "passage: TAGS\n#license-llama2 #region-us \n## hh-rlhf中文翻译版本\n基于Anthropic论文Training a Helpful and Harmless Assistant with Reinforcement Learning from Human Feedback 开源的helpful 和harmless数据,使用翻译工具进行了翻译。 \nhh_rlhf_train.jsonl 合并中英文训练集数据 清洗过后17万条 \nhh_rlhf_test.jsonl 合并中英文测试集数据 清洗过后9千条 \nharmless_base_cn_train.jsonl 42394条 \nharmless_base_cn_test.jsonl 2304条 \nhelpful_base_cn_train.jsonl 43722条 \nhelpful_base_cn_test.jsonl 2346条## 实验报告\n相关rlhf实验报告:URL" ]
302e2dbe0807050fff3aed4e98363d7125e5a2e2
# Dataset of kasodani_kyouko/幽谷響子 (Touhou) This is the dataset of kasodani_kyouko/幽谷響子 (Touhou), containing 500 images and their tags. The core tags of this character are `green_hair, short_hair, animal_ears, green_eyes, dog_ears, tail`, which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)). ## List of Packages | Name | Images | Size | Download | Type | Description | |:-----------------|---------:|:-----------|:------------------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------| | raw | 500 | 462.20 MiB | [Download](https://huggingface.co/datasets/CyberHarem/kasodani_kyouko_touhou/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). | | 800 | 500 | 324.92 MiB | [Download](https://huggingface.co/datasets/CyberHarem/kasodani_kyouko_touhou/resolve/main/dataset-800.zip) | IMG+TXT | dataset with the shorter side not exceeding 800 pixels. | | stage3-p480-800 | 1062 | 627.14 MiB | [Download](https://huggingface.co/datasets/CyberHarem/kasodani_kyouko_touhou/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | | 1200 | 500 | 434.90 MiB | [Download](https://huggingface.co/datasets/CyberHarem/kasodani_kyouko_touhou/resolve/main/dataset-1200.zip) | IMG+TXT | dataset with the shorter side not exceeding 1200 pixels. | | stage3-p480-1200 | 1062 | 794.57 MiB | [Download](https://huggingface.co/datasets/CyberHarem/kasodani_kyouko_touhou/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | ### Load Raw Dataset with Waifuc We provide raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need this, just run the following code ```python import os import zipfile from huggingface_hub import hf_hub_download from waifuc.source import LocalSource # download raw archive file zip_file = hf_hub_download( repo_id='CyberHarem/kasodani_kyouko_touhou', repo_type='dataset', filename='dataset-raw.zip', ) # extract files to your directory dataset_dir = 'dataset_dir' os.makedirs(dataset_dir, exist_ok=True) with zipfile.ZipFile(zip_file, 'r') as zf: zf.extractall(dataset_dir) # load the dataset with waifuc source = LocalSource(dataset_dir) for item in source: print(item.image, item.meta['filename'], item.meta['tags']) ``` ## List of Clusters List of tag clustering result, maybe some outfits can be mined here. 
### Raw Text Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | 0 | 14 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, open_mouth, smile, solo, bamboo_broom, dress, fang, blush | | 1 | 6 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, long_sleeves, looking_at_viewer, simple_background, solo, bamboo_broom, blush, holding_broom, white_background, :d, open_mouth, pink_dress | | 2 | 18 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | 1girl, full_body, solo, white_socks, black_footwear, holding_broom, long_sleeves, open_mouth, pink_dress, shoes, looking_at_viewer, smile, bamboo_broom, simple_background, standing, white_background | | 3 | 7 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | 1girl, holding_broom, long_sleeves, pink_dress, solo, blush, looking_at_viewer, bangs, upper_body, hair_between_eyes, smile, bamboo_broom, closed_mouth, open_mouth | | 4 | 5 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | 1girl, bangs, full_body, long_sleeves, simple_background, solo, standing, white_background, white_socks, black_footwear, hair_between_eyes, open_mouth, pink_dress, shoes, :d, blush, dog_tail, looking_at_viewer, skin_fang, ahoge, brown_dress | | 5 | 5 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | 1boy, 1girl, blush, hetero, nipples, open_mouth, solo_focus, cum_in_pussy, looking_at_viewer, navel, penis, sex, small_breasts, vaginal, collarbone, dog_tail, spread_legs, bar_censor, bikini_bottom_aside, medium_breasts, on_back, smile, sweat | ### Table Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | open_mouth | smile | solo | bamboo_broom | dress | fang | blush | long_sleeves | looking_at_viewer | simple_background | holding_broom | white_background | :d | pink_dress | full_body | white_socks | black_footwear | shoes | standing | bangs | upper_body | hair_between_eyes | closed_mouth | dog_tail | skin_fang | ahoge | brown_dress | 1boy | hetero | nipples | solo_focus | cum_in_pussy | navel | penis | sex | small_breasts | vaginal | collarbone | spread_legs | bar_censor | bikini_bottom_aside | medium_breasts | on_back | sweat | 
|----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:-------------|:--------|:-------|:---------------|:--------|:-------|:--------|:---------------|:--------------------|:--------------------|:----------------|:-------------------|:-----|:-------------|:------------|:--------------|:-----------------|:--------|:-----------|:--------|:-------------|:--------------------|:---------------|:-----------|:------------|:--------|:--------------|:-------|:---------|:----------|:-------------|:---------------|:--------|:--------|:------|:----------------|:----------|:-------------|:--------------|:-------------|:----------------------|:-----------------|:----------|:--------| | 0 | 14 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 1 | 6 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | X | | X | X | | | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 2 | 18 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | X | X | X | X | X | | | | X | X | X | X | X | | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | 3 | 7 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | X | X | X | X | X | | | X | X | X | | X | | | X | | | | | | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | 4 | 5 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | X | X | | X | | | | X | X | X | X | | X | X | X | X | X | X | X | X | X | | X | | X | X | X | X | | | | | | | | | | | | | | | | | | | 5 | 5 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | X | X | X | | | | | X | | X | | | | | | | | | | | | | | | X | | | | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X |
CyberHarem/kasodani_kyouko_touhou
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-08-18T09:13:02+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2024-01-14T18:07:22+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of kasodani\_kyouko/幽谷響子 (Touhou) ========================================= This is the dataset of kasodani\_kyouko/幽谷響子 (Touhou), containing 500 images and their tags. The core tags of this character are 'green\_hair, short\_hair, animal\_ears, green\_eyes, dog\_ears, tail', which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by DeepGHS Team(huggingface organization). List of Packages ---------------- ### Load Raw Dataset with Waifuc We provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code List of Clusters ---------------- List of tag clustering result, maybe some outfits can be mined here. ### Raw Text Version ### Table Version
[ "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n", "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ 44, 61, 5, 4 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.### Raw Text Version### Table Version" ]
333db257c3979f09e18c296c22d83da75d5fdd3f
# Dataset Card for "sentimen-ita-nllb" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Ocelot02/sentimen-ita-nllb
[ "region:us" ]
2023-08-18T09:27:49+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "label", "dtype": {"class_label": {"names": {"0": "negative", "1": "neutral", "2": "positive"}}}}, {"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 168194, "num_examples": 1839}, {"name": "validation", "num_bytes": 29721, "num_examples": 324}, {"name": "test", "num_bytes": 82541, "num_examples": 870}], "download_size": 175617, "dataset_size": 280456}}
2023-08-18T09:28:08+00:00
[]
[]
TAGS #region-us
# Dataset Card for "sentimen-ita-nllb" More Information needed
[ "# Dataset Card for \"sentimen-ita-nllb\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"sentimen-ita-nllb\"\n\nMore Information needed" ]
[ 6, 18 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"sentimen-ita-nllb\"\n\nMore Information needed" ]
7b18894cad54a8230375c6961601787fa1601c34
# Dataset of wriggle_nightbug/リグル・ナイトバグ/리글나이트버그 (Touhou) This is the dataset of wriggle_nightbug/リグル・ナイトバグ/리글나이트버그 (Touhou), containing 500 images and their tags. The core tags of this character are `green_hair, short_hair, green_eyes, bangs`, which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)). ## List of Packages | Name | Images | Size | Download | Type | Description | |:-----------------|---------:|:-----------|:-------------------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------| | raw | 500 | 413.44 MiB | [Download](https://huggingface.co/datasets/CyberHarem/wriggle_nightbug_touhou/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). | | 800 | 500 | 290.57 MiB | [Download](https://huggingface.co/datasets/CyberHarem/wriggle_nightbug_touhou/resolve/main/dataset-800.zip) | IMG+TXT | dataset with the shorter side not exceeding 800 pixels. | | stage3-p480-800 | 964 | 534.29 MiB | [Download](https://huggingface.co/datasets/CyberHarem/wriggle_nightbug_touhou/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | | 1200 | 500 | 384.62 MiB | [Download](https://huggingface.co/datasets/CyberHarem/wriggle_nightbug_touhou/resolve/main/dataset-1200.zip) | IMG+TXT | dataset with the shorter side not exceeding 1200 pixels. | | stage3-p480-1200 | 964 | 672.49 MiB | [Download](https://huggingface.co/datasets/CyberHarem/wriggle_nightbug_touhou/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | ### Load Raw Dataset with Waifuc We provide raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need this, just run the following code ```python import os import zipfile from huggingface_hub import hf_hub_download from waifuc.source import LocalSource # download raw archive file zip_file = hf_hub_download( repo_id='CyberHarem/wriggle_nightbug_touhou', repo_type='dataset', filename='dataset-raw.zip', ) # extract files to your directory dataset_dir = 'dataset_dir' os.makedirs(dataset_dir, exist_ok=True) with zipfile.ZipFile(zip_file, 'r') as zf: zf.extractall(dataset_dir) # load the dataset with waifuc source = LocalSource(dataset_dir) for item in source: print(item.image, item.meta['filename'], item.meta['tags']) ``` ## List of Clusters List of tag clustering result, maybe some outfits can be mined here. 
### Raw Text Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | 0 | 10 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, antennae, cape, solo, smile, shirt, pants, androgynous, reverse_trap | | 1 | 6 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, antennae, cape, long_sleeves, shirt, solo, open_mouth, shorts, blush, fireflies, smile | | 2 | 26 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | 1girl, antennae, black_cape, solo, white_shirt, collared_shirt, long_sleeves, smile, looking_at_viewer, red_cape, two-sided_cape, closed_mouth, simple_background, full_body, shoes, white_background, white_socks, blush, puffy_sleeves, black_shorts, buttons, hair_between_eyes, blue_shorts | | 3 | 5 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | antennae, blush, navel, nipples, 1girl, completely_nude, open_mouth, small_breasts, solo_focus, 1boy, blue_eyes, hair_between_eyes, hetero, loli, onsen, sitting, water, censored, mixed_bathing, outdoors | | 4 | 20 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | 1girl, antennae, blush, hetero, nipples, 1boy, open_mouth, solo_focus, vaginal, navel, small_breasts, looking_at_viewer, penis, pov, feet_out_of_frame, sweat, completely_nude, mosaic_censoring, on_back, smile, hair_between_eyes, missionary, heart, motion_lines, cum_in_pussy, happy_sex, medium_breasts, pillow, spread_legs, straddling | | 5 | 8 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | 1girl, antennae, black_panties, simple_background, looking_at_viewer, navel, nipples, solo, topless, underwear_only, grey_background, small_breasts, black_thighhighs, sketch, blush, collarbone, open_mouth, smile | | 6 | 5 | ![](samples/6/clu6-sample0.png) | ![](samples/6/clu6-sample1.png) | ![](samples/6/clu6-sample2.png) | ![](samples/6/clu6-sample3.png) | ![](samples/6/clu6-sample4.png) | 2girls, antennae, blush, bottomless, cum, futa_with_female, implied_futanari, open_mouth, cape, closed_eyes, large_breasts, nipples, sex_from_behind, shirt | ### Table Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | antennae | cape | solo | smile | shirt | pants | androgynous | reverse_trap | long_sleeves | open_mouth | shorts | blush | fireflies | black_cape | white_shirt | collared_shirt | looking_at_viewer | red_cape | 
two-sided_cape | closed_mouth | simple_background | full_body | shoes | white_background | white_socks | puffy_sleeves | black_shorts | buttons | hair_between_eyes | blue_shorts | navel | nipples | completely_nude | small_breasts | solo_focus | 1boy | blue_eyes | hetero | loli | onsen | sitting | water | censored | mixed_bathing | outdoors | vaginal | penis | pov | feet_out_of_frame | sweat | mosaic_censoring | on_back | missionary | heart | motion_lines | cum_in_pussy | happy_sex | medium_breasts | pillow | spread_legs | straddling | black_panties | topless | underwear_only | grey_background | black_thighhighs | sketch | collarbone | 2girls | bottomless | cum | futa_with_female | implied_futanari | closed_eyes | large_breasts | sex_from_behind | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:-----------|:-------|:-------|:--------|:--------|:--------|:--------------|:---------------|:---------------|:-------------|:---------|:--------|:------------|:-------------|:--------------|:-----------------|:--------------------|:-----------|:-----------------|:---------------|:--------------------|:------------|:--------|:-------------------|:--------------|:----------------|:---------------|:----------|:--------------------|:--------------|:--------|:----------|:------------------|:----------------|:-------------|:-------|:------------|:---------|:-------|:--------|:----------|:--------|:-----------|:----------------|:-----------|:----------|:--------|:------|:--------------------|:--------|:-------------------|:----------|:-------------|:--------|:---------------|:---------------|:------------|:-----------------|:---------|:--------------|:-------------|:----------------|:----------|:-----------------|:------------------|:-------------------|:---------|:-------------|:---------|:-------------|:------|:-------------------|:-------------------|:--------------|:----------------|:------------------| | 0 | 10 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 1 | 6 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | X | X | X | X | X | | | | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 2 | 26 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | X | X | | X | X | | | | | X | | | X | | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 3 | 5 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | X | X | | | | | | | | | X | | X | | | | | | | | | | | | | | | | | X | | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 4 | 20 | 
![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | X | X | | | X | | | | | | X | | X | | | | | X | | | | | | | | | | | | X | | X | X | X | X | X | X | | X | | | | | | | | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | 5 | 8 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | X | X | | X | X | | | | | | X | | X | | | | | X | | | | X | | | | | | | | | | X | X | | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | X | X | X | X | X | X | X | | | | | | | | | | 6 | 5 | ![](samples/6/clu6-sample0.png) | ![](samples/6/clu6-sample1.png) | ![](samples/6/clu6-sample2.png) | ![](samples/6/clu6-sample3.png) | ![](samples/6/clu6-sample4.png) | | X | X | | | X | | | | | X | | X | | | | | | | | | | | | | | | | | | | | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | X | X | X | X | X | X | X | X |
CyberHarem/wriggle_nightbug_touhou
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-08-18T09:51:08+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2024-01-14T16:32:45+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of wriggle\_nightbug/リグル・ナイトバグ/리글나이트버그 (Touhou) ======================================================= This is the dataset of wriggle\_nightbug/リグル・ナイトバグ/리글나이트버그 (Touhou), containing 500 images and their tags. The core tags of this character are 'green\_hair, short\_hair, green\_eyes, bangs', which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by DeepGHS Team(huggingface organization). List of Packages ---------------- ### Load Raw Dataset with Waifuc We provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code List of Clusters ---------------- List of tag clustering result, maybe some outfits can be mined here. ### Raw Text Version ### Table Version
[ "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n", "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ 44, 61, 5, 4 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.### Raw Text Version### Table Version" ]
4ff77791f25e4b8db55daa28f4e122198f9d6e9a
# Dataset Card for "llama2-german-corpus-tokenized-llama-chunk-4096" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
philschmid/llama2-german-corpus-tokenized-llama-chunk-4096
[ "region:us" ]
2023-08-18T09:51:09+00:00
{"dataset_info": {"features": [{"name": "input_ids", "sequence": "int32"}, {"name": "token_type_ids", "sequence": "int8"}, {"name": "attention_mask", "sequence": "int8"}, {"name": "labels", "sequence": "int64"}], "splits": [{"name": "train", "num_bytes": 1190392538880, "num_examples": 20753008}], "download_size": 307400657843, "dataset_size": 1190392538880}}
2023-08-18T16:33:00+00:00
[]
[]
TAGS #region-us
# Dataset Card for "llama2-german-corpus-tokenized-llama-chunk-4096" More Information needed
[ "# Dataset Card for \"llama2-german-corpus-tokenized-llama-chunk-4096\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"llama2-german-corpus-tokenized-llama-chunk-4096\"\n\nMore Information needed" ]
[ 6, 30 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"llama2-german-corpus-tokenized-llama-chunk-4096\"\n\nMore Information needed" ]
c8fb83acd87d4b536ea706b520876fa0c816d6b9
# Dataset Card for "squad" ## Table of Contents - [Dataset Card for "squad"](#dataset-card-for-squad) - [Table of Contents](#table-of-contents) - [Dataset Description](#dataset-description) - [Dataset Summary](#dataset-summary) - [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards) - [Languages](#languages) - [Dataset Structure](#dataset-structure) - [Data Instances](#data-instances) - [plain_text](#plain_text) - [Data Fields](#data-fields) - [plain_text](#plain_text-1) - [Data Splits](#data-splits) - [Dataset Creation](#dataset-creation) - [Curation Rationale](#curation-rationale) - [Source Data](#source-data) - [Initial Data Collection and Normalization](#initial-data-collection-and-normalization) - [Who are the source language producers?](#who-are-the-source-language-producers) - [Annotations](#annotations) - [Annotation process](#annotation-process) - [Who are the annotators?](#who-are-the-annotators) - [Personal and Sensitive Information](#personal-and-sensitive-information) - [Considerations for Using the Data](#considerations-for-using-the-data) - [Social Impact of Dataset](#social-impact-of-dataset) - [Discussion of Biases](#discussion-of-biases) - [Other Known Limitations](#other-known-limitations) - [Additional Information](#additional-information) - [Dataset Curators](#dataset-curators) - [Licensing Information](#licensing-information) - [Citation Information](#citation-information) - [Contributions](#contributions) ## Dataset Description - **Homepage:** [https://rajpurkar.github.io/SQuAD-explorer/](https://rajpurkar.github.io/SQuAD-explorer/) - **Repository:** [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) - **Paper:** [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) - **Point of Contact:** [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) - **Size of downloaded dataset files:** 35.14 MB - **Size of the generated dataset:** 89.92 MB - **Total amount of disk used:** 125.06 MB ### Dataset Summary Stanford Question Answering Dataset (SQuAD) is a reading comprehension dataset, consisting of questions posed by crowdworkers on a set of Wikipedia articles, where the answer to every question is a segment of text, or span, from the corresponding reading passage, or the question might be unanswerable. ### Supported Tasks and Leaderboards [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) ### Languages [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) ## Dataset Structure ### Data Instances #### plain_text - **Size of downloaded dataset files:** 35.14 MB - **Size of the generated dataset:** 89.92 MB - **Total amount of disk used:** 125.06 MB An example of 'train' looks as follows. ``` { "answers": { "answer_start": [1], "text": ["This is a test text"] }, "context": "This is a test context.", "id": "1", "question": "Is this a test?", "title": "train test" } ``` ### Data Fields The data fields are the same among all splits. #### plain_text - `id`: a `string` feature. - `title`: a `string` feature. - `context`: a `string` feature. - `question`: a `string` feature. - `answers`: a dictionary feature containing: - `text`: a `string` feature. 
- `answer_start`: a `int32` feature. ### Data Splits | name |train|validation| |----------|----:|---------:| |plain_text|87599| 10570| ## Dataset Creation ### Curation Rationale [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) ### Source Data #### Initial Data Collection and Normalization [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) #### Who are the source language producers? [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) ### Annotations #### Annotation process [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) #### Who are the annotators? [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) ### Personal and Sensitive Information [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) ### Discussion of Biases [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) ### Other Known Limitations [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) ## Additional Information ### Dataset Curators [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) ### Licensing Information [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) ### Citation Information ``` @article{2016arXiv160605250R, author = {{Rajpurkar}, Pranav and {Zhang}, Jian and {Lopyrev}, Konstantin and {Liang}, Percy}, title = "{SQuAD: 100,000+ Questions for Machine Comprehension of Text}", journal = {arXiv e-prints}, year = 2016, eid = {arXiv:1606.05250}, pages = {arXiv:1606.05250}, archivePrefix = {arXiv}, eprint = {1606.05250}, } ``` ### Contributions Thanks to [@lewtun](https://github.com/lewtun), [@albertvillanova](https://github.com/albertvillanova), [@patrickvonplaten](https://github.com/patrickvonplaten), [@thomwolf](https://github.com/thomwolf) for adding this dataset.
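As a small usage sketch to complement the field description above, the snippet below loads the `plain_text` config of this mirror and reads one question/answer pair; the repository id is the one this card is published under, and the access pattern follows the standard `datasets` API.

```python
from datasets import load_dataset

# Load the plain_text config of this SQuAD mirror from the Hub.
squad = load_dataset("lhoestq/squad", "plain_text")

example = squad["train"][0]
# "answers" holds parallel lists of answer strings and character start offsets.
print(example["question"])
print(example["answers"]["text"], example["answers"]["answer_start"])
```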
lhoestq/squad
[ "task_categories:question-answering", "task_ids:extractive-qa", "annotations_creators:crowdsourced", "language_creators:crowdsourced", "language_creators:found", "multilinguality:monolingual", "size_categories:10K<n<100K", "source_datasets:extended|wikipedia", "language:en", "license:cc-by-4.0", "arxiv:1606.05250", "region:us" ]
2023-08-18T09:52:20+00:00
{"annotations_creators": ["crowdsourced"], "language_creators": ["crowdsourced", "found"], "language": ["en"], "license": ["cc-by-4.0"], "multilinguality": ["monolingual"], "size_categories": ["10K<n<100K"], "source_datasets": ["extended|wikipedia"], "task_categories": ["question-answering"], "task_ids": ["extractive-qa"], "paperswithcode_id": "squad", "pretty_name": "SQuAD", "dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "title", "dtype": "string"}, {"name": "context", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "answers", "sequence": [{"name": "text", "dtype": "string"}, {"name": "answer_start", "dtype": "int32"}]}], "config_name": "plain_text", "splits": [{"name": "train", "num_bytes": 79317110, "num_examples": 87599}, {"name": "validation", "num_bytes": 10472653, "num_examples": 10570}], "download_size": 35142551, "dataset_size": 89789763}, "train-eval-index": [{"config": "plain_text", "task": "question-answering", "task_id": "extractive_question_answering", "splits": {"train_split": "train", "eval_split": "validation"}, "col_mapping": {"question": "question", "context": "context", "answers": {"text": "text", "answer_start": "answer_start"}}, "metrics": [{"type": "squad", "name": "SQuAD"}]}]}
2023-08-18T09:52:41+00:00
[ "1606.05250" ]
[ "en" ]
TAGS #task_categories-question-answering #task_ids-extractive-qa #annotations_creators-crowdsourced #language_creators-crowdsourced #language_creators-found #multilinguality-monolingual #size_categories-10K<n<100K #source_datasets-extended|wikipedia #language-English #license-cc-by-4.0 #arxiv-1606.05250 #region-us
Dataset Card for "squad" ======================== Table of Contents ----------------- * Dataset Card for "squad" + Table of Contents + Dataset Description - Dataset Summary - Supported Tasks and Leaderboards - Languages + Dataset Structure - Data Instances * plain\_text - Data Fields * plain\_text - Data Splits + Dataset Creation - Curation Rationale - Source Data * Initial Data Collection and Normalization * Who are the source language producers? - Annotations * Annotation process * Who are the annotators? - Personal and Sensitive Information + Considerations for Using the Data - Social Impact of Dataset - Discussion of Biases - Other Known Limitations + Additional Information - Dataset Curators - Licensing Information - Citation Information - Contributions Dataset Description ------------------- * Homepage: URL * Repository: * Paper: * Point of Contact: * Size of downloaded dataset files: 35.14 MB * Size of the generated dataset: 89.92 MB * Total amount of disk used: 125.06 MB ### Dataset Summary Stanford Question Answering Dataset (SQuAD) is a reading comprehension dataset, consisting of questions posed by crowdworkers on a set of Wikipedia articles, where the answer to every question is a segment of text, or span, from the corresponding reading passage, or the question might be unanswerable. ### Supported Tasks and Leaderboards ### Languages Dataset Structure ----------------- ### Data Instances #### plain\_text * Size of downloaded dataset files: 35.14 MB * Size of the generated dataset: 89.92 MB * Total amount of disk used: 125.06 MB An example of 'train' looks as follows. ### Data Fields The data fields are the same among all splits. #### plain\_text * 'id': a 'string' feature. * 'title': a 'string' feature. * 'context': a 'string' feature. * 'question': a 'string' feature. * 'answers': a dictionary feature containing: + 'text': a 'string' feature. + 'answer\_start': a 'int32' feature. ### Data Splits Dataset Creation ---------------- ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information Considerations for Using the Data --------------------------------- ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations Additional Information ---------------------- ### Dataset Curators ### Licensing Information ### Contributions Thanks to @lewtun, @albertvillanova, @patrickvonplaten, @thomwolf for adding this dataset.
[ "### Dataset Summary\n\n\nStanford Question Answering Dataset (SQuAD) is a reading comprehension dataset, consisting of questions posed by crowdworkers on a set of Wikipedia articles, where the answer to every question is a segment of text, or span, from the corresponding reading passage, or the question might be unanswerable.", "### Supported Tasks and Leaderboards", "### Languages\n\n\nDataset Structure\n-----------------", "### Data Instances", "#### plain\\_text\n\n\n* Size of downloaded dataset files: 35.14 MB\n* Size of the generated dataset: 89.92 MB\n* Total amount of disk used: 125.06 MB\n\n\nAn example of 'train' looks as follows.", "### Data Fields\n\n\nThe data fields are the same among all splits.", "#### plain\\_text\n\n\n* 'id': a 'string' feature.\n* 'title': a 'string' feature.\n* 'context': a 'string' feature.\n* 'question': a 'string' feature.\n* 'answers': a dictionary feature containing:\n\t+ 'text': a 'string' feature.\n\t+ 'answer\\_start': a 'int32' feature.", "### Data Splits\n\n\n\nDataset Creation\n----------------", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information\n\n\nConsiderations for Using the Data\n---------------------------------", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations\n\n\nAdditional Information\n----------------------", "### Dataset Curators", "### Licensing Information", "### Contributions\n\n\nThanks to @lewtun, @albertvillanova, @patrickvonplaten, @thomwolf for adding this dataset." ]
[ "TAGS\n#task_categories-question-answering #task_ids-extractive-qa #annotations_creators-crowdsourced #language_creators-crowdsourced #language_creators-found #multilinguality-monolingual #size_categories-10K<n<100K #source_datasets-extended|wikipedia #language-English #license-cc-by-4.0 #arxiv-1606.05250 #region-us \n", "### Dataset Summary\n\n\nStanford Question Answering Dataset (SQuAD) is a reading comprehension dataset, consisting of questions posed by crowdworkers on a set of Wikipedia articles, where the answer to every question is a segment of text, or span, from the corresponding reading passage, or the question might be unanswerable.", "### Supported Tasks and Leaderboards", "### Languages\n\n\nDataset Structure\n-----------------", "### Data Instances", "#### plain\\_text\n\n\n* Size of downloaded dataset files: 35.14 MB\n* Size of the generated dataset: 89.92 MB\n* Total amount of disk used: 125.06 MB\n\n\nAn example of 'train' looks as follows.", "### Data Fields\n\n\nThe data fields are the same among all splits.", "#### plain\\_text\n\n\n* 'id': a 'string' feature.\n* 'title': a 'string' feature.\n* 'context': a 'string' feature.\n* 'question': a 'string' feature.\n* 'answers': a dictionary feature containing:\n\t+ 'text': a 'string' feature.\n\t+ 'answer\\_start': a 'int32' feature.", "### Data Splits\n\n\n\nDataset Creation\n----------------", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information\n\n\nConsiderations for Using the Data\n---------------------------------", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations\n\n\nAdditional Information\n----------------------", "### Dataset Curators", "### Licensing Information", "### Contributions\n\n\nThanks to @lewtun, @albertvillanova, @patrickvonplaten, @thomwolf for adding this dataset." ]
[ 114, 74, 10, 11, 6, 55, 17, 93, 11, 7, 4, 10, 10, 5, 5, 9, 18, 7, 8, 14, 6, 6, 34 ]
[ "passage: TAGS\n#task_categories-question-answering #task_ids-extractive-qa #annotations_creators-crowdsourced #language_creators-crowdsourced #language_creators-found #multilinguality-monolingual #size_categories-10K<n<100K #source_datasets-extended|wikipedia #language-English #license-cc-by-4.0 #arxiv-1606.05250 #region-us \n### Dataset Summary\n\n\nStanford Question Answering Dataset (SQuAD) is a reading comprehension dataset, consisting of questions posed by crowdworkers on a set of Wikipedia articles, where the answer to every question is a segment of text, or span, from the corresponding reading passage, or the question might be unanswerable.### Supported Tasks and Leaderboards### Languages\n\n\nDataset Structure\n-----------------### Data Instances#### plain\\_text\n\n\n* Size of downloaded dataset files: 35.14 MB\n* Size of the generated dataset: 89.92 MB\n* Total amount of disk used: 125.06 MB\n\n\nAn example of 'train' looks as follows.### Data Fields\n\n\nThe data fields are the same among all splits.#### plain\\_text\n\n\n* 'id': a 'string' feature.\n* 'title': a 'string' feature.\n* 'context': a 'string' feature.\n* 'question': a 'string' feature.\n* 'answers': a dictionary feature containing:\n\t+ 'text': a 'string' feature.\n\t+ 'answer\\_start': a 'int32' feature.### Data Splits\n\n\n\nDataset Creation\n----------------### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information\n\n\nConsiderations for Using the Data\n---------------------------------### Social Impact of Dataset### Discussion of Biases### Other Known Limitations\n\n\nAdditional Information\n----------------------### Dataset Curators### Licensing Information" ]
deee48f4c556005c0a9b97a0d7e38c39056dd714
# Dataset of tokiko (Touhou)

This is the dataset of tokiko (Touhou), containing 464 images and their tags.

The core tags of this character are `multicolored_hair, head_wings, horns, wings, two-tone_hair, short_hair, blue_hair, red_eyes, red_wings, white_hair, ahoge`, which are pruned in this dataset.

Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs) ([huggingface organization](https://huggingface.co/deepghs)).

## List of Packages

| Name             | Images | Size       | Download                                                                                                       | Type       | Description                                                          |
|:-----------------|-------:|:-----------|:---------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------|
| raw              | 464    | 435.95 MiB | [Download](https://huggingface.co/datasets/CyberHarem/tokiko_touhou/resolve/main/dataset-raw.zip)               | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). |
| 800              | 464    | 293.26 MiB | [Download](https://huggingface.co/datasets/CyberHarem/tokiko_touhou/resolve/main/dataset-800.zip)               | IMG+TXT    | dataset with the shorter side not exceeding 800 pixels.              |
| stage3-p480-800  | 937    | 564.93 MiB | [Download](https://huggingface.co/datasets/CyberHarem/tokiko_touhou/resolve/main/dataset-stage3-p480-800.zip)   | IMG+TXT    | 3-stage cropped dataset with the area not less than 480x480 pixels.  |
| 1200             | 464    | 402.93 MiB | [Download](https://huggingface.co/datasets/CyberHarem/tokiko_touhou/resolve/main/dataset-1200.zip)              | IMG+TXT    | dataset with the shorter side not exceeding 1200 pixels.             |
| stage3-p480-1200 | 937    | 726.29 MiB | [Download](https://huggingface.co/datasets/CyberHarem/tokiko_touhou/resolve/main/dataset-stage3-p480-1200.zip)  | IMG+TXT    | 3-stage cropped dataset with the area not less than 480x480 pixels.  |

### Load Raw Dataset with Waifuc

We provide the raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need this, just run the following code

```python
import os
import zipfile

from huggingface_hub import hf_hub_download
from waifuc.source import LocalSource

# download raw archive file
zip_file = hf_hub_download(
    repo_id='CyberHarem/tokiko_touhou',
    repo_type='dataset',
    filename='dataset-raw.zip',
)

# extract files to your directory
dataset_dir = 'dataset_dir'
os.makedirs(dataset_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
    zf.extractall(dataset_dir)

# load the dataset with waifuc
source = LocalSource(dataset_dir)
for item in source:
    print(item.image, item.meta['filename'], item.meta['tags'])
```

## List of Clusters

List of tag clustering results; some outfits may be mined here.
### Raw Text Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | 0 | 10 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, book, solo, smile | | 1 | 11 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, open_mouth, smile, solo, blush | | 2 | 6 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | 1girl, book, long_sleeves, looking_at_viewer, solo, dress, open_mouth, smile | | 3 | 7 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | 1girl, bird_wings, long_sleeves, open_mouth, solo, :d, bangs, dress, holding_book, looking_at_viewer, blush, wide_sleeves | | 4 | 13 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | 1girl, long_sleeves, looking_at_viewer, simple_background, solo, bird_wings, bow, white_background, wide_sleeves, holding_book, bangs, dress, feathered_wings, full_body, standing, :d, boots, open_mouth, black_skirt, ribbon, blush, brown_footwear, shirt | | 5 | 13 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | 1girl, long_sleeves, solo, boots, skirt, wide_sleeves, smile, blush, book, bird_wings, bow | | 6 | 7 | ![](samples/6/clu6-sample0.png) | ![](samples/6/clu6-sample1.png) | ![](samples/6/clu6-sample2.png) | ![](samples/6/clu6-sample3.png) | ![](samples/6/clu6-sample4.png) | 1girl, hetero, nipples, solo_focus, vaginal, blush, completely_nude, cum_in_pussy, group_sex, multiple_boys, multiple_penises, open_mouth, 1boy, double_handjob, looking_at_viewer, m_legs, navel, small_breasts, smile, spread_legs, uncensored | ### Table Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | book | solo | smile | open_mouth | blush | long_sleeves | looking_at_viewer | dress | bird_wings | :d | bangs | holding_book | wide_sleeves | simple_background | bow | white_background | feathered_wings | full_body | standing | boots | black_skirt | ribbon | brown_footwear | shirt | skirt | hetero | nipples | solo_focus | vaginal | completely_nude | cum_in_pussy | group_sex | multiple_boys | multiple_penises | 1boy | double_handjob | m_legs | navel | small_breasts | spread_legs | uncensored | 
|----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:-------|:-------|:--------|:-------------|:--------|:---------------|:--------------------|:--------|:-------------|:-----|:--------|:---------------|:---------------|:--------------------|:------|:-------------------|:------------------|:------------|:-----------|:--------|:--------------|:---------|:-----------------|:--------|:--------|:---------|:----------|:-------------|:----------|:------------------|:---------------|:------------|:----------------|:-------------------|:-------|:-----------------|:---------|:--------|:----------------|:--------------|:-------------| | 0 | 10 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 1 | 11 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 2 | 6 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | X | X | X | X | X | | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 3 | 7 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | X | | X | | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 4 | 13 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | X | | X | | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | 5 | 13 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | X | X | X | X | | X | X | | | X | | | | X | | X | | | | | X | | | | | X | | | | | | | | | | | | | | | | | | 6 | 7 | ![](samples/6/clu6-sample0.png) | ![](samples/6/clu6-sample1.png) | ![](samples/6/clu6-sample2.png) | ![](samples/6/clu6-sample3.png) | ![](samples/6/clu6-sample4.png) | X | | | X | X | X | | X | | | | | | | | | | | | | | | | | | | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X |
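The waifuc snippet earlier in this card targets the raw archive. For the IMG+TXT packages in the package table (for example `dataset-800.zip`), a minimal download-and-iterate sketch might look like the following; the assumption that every image is paired with a same-stem `.txt` file holding its tags, sitting flat in the archive, is inferred from the IMG+TXT label rather than stated explicitly above.

```python
import os
import zipfile

from huggingface_hub import hf_hub_download

# Download one of the IMG+TXT packages listed in the table (the 800px version here).
zip_file = hf_hub_download(
    repo_id='CyberHarem/tokiko_touhou',
    repo_type='dataset',
    filename='dataset-800.zip',
)

# Extract the archive into a local directory.
dataset_dir = 'dataset_800'
os.makedirs(dataset_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
    zf.extractall(dataset_dir)

# Assumed layout: each image file is accompanied by a same-stem .txt tag file.
for name in sorted(os.listdir(dataset_dir)):
    stem, ext = os.path.splitext(name)
    if ext.lower() in ('.png', '.jpg', '.jpeg', '.webp'):
        tag_file = os.path.join(dataset_dir, stem + '.txt')
        if os.path.exists(tag_file):
            with open(tag_file, 'r', encoding='utf-8') as f:
                print(name, '->', f.read().strip())
```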
CyberHarem/tokiko_touhou
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-08-18T09:56:35+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2024-01-15T07:20:09+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of tokiko (Touhou)
==========================

This is the dataset of tokiko (Touhou), containing 464 images and their tags.

The core tags of this character are 'multicolored\_hair, head\_wings, horns, wings, two-tone\_hair, short\_hair, blue\_hair, red\_eyes, red\_wings, white\_hair, ahoge', which are pruned in this dataset.

Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by DeepGHS Team (huggingface organization).

List of Packages
----------------

### Load Raw Dataset with Waifuc

We provide the raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code

List of Clusters
----------------

List of tag clustering results; some outfits may be mined here.

### Raw Text Version

### Table Version
[ "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n", "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ 44, 61, 5, 4 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.### Raw Text Version### Table Version" ]
2b0782870e7a9849d256fe43fa184ae533935306
# Dataset Card for "telugu_paraphrase_instruction_tune_iith" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
desik98/telugu_paraphrase_instruction_tune_iith
[ "region:us" ]
2023-08-18T09:57:39+00:00
{"dataset_info": {"features": [{"name": "inputs", "dtype": "string"}, {"name": "targets", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 140868, "num_examples": 516}], "download_size": 50573, "dataset_size": 140868}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-08-18T09:57:40+00:00
[]
[]
TAGS #region-us
# Dataset Card for "telugu_paraphrase_instruction_tune_iith" More Information needed
[ "# Dataset Card for \"telugu_paraphrase_instruction_tune_iith\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"telugu_paraphrase_instruction_tune_iith\"\n\nMore Information needed" ]
[ 6, 23 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"telugu_paraphrase_instruction_tune_iith\"\n\nMore Information needed" ]
1b35b15873612242a664c3d5db90829ff9518425
# Dataset Card for "hugging_face_telugu_paraphrase_instruction_tune" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
desik98/hugging_face_telugu_paraphrase_instruction_tune
[ "region:us" ]
2023-08-18T09:58:03+00:00
{"dataset_info": {"features": [{"name": "inputs", "dtype": "string"}, {"name": "targets", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 583873, "num_examples": 1001}], "download_size": 233840, "dataset_size": 583873}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-08-18T09:58:05+00:00
[]
[]
TAGS #region-us
# Dataset Card for "hugging_face_telugu_paraphrase_instruction_tune" More Information needed
[ "# Dataset Card for \"hugging_face_telugu_paraphrase_instruction_tune\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"hugging_face_telugu_paraphrase_instruction_tune\"\n\nMore Information needed" ]
[ 6, 25 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"hugging_face_telugu_paraphrase_instruction_tune\"\n\nMore Information needed" ]
811c21f4b7fa968275e49766f894504241326ae4
# Dataset Card for Evaluation run of YeungNLP/firefly-ziya-13b

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/YeungNLP/firefly-ziya-13b
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [YeungNLP/firefly-ziya-13b](https://huggingface.co/YeungNLP/firefly-ziya-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_YeungNLP__firefly-ziya-13b",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-15T21:35:49.973615](https://huggingface.co/datasets/open-llm-leaderboard/details_YeungNLP__firefly-ziya-13b/blob/main/results_2023-10-15T21-35-49.973615.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each one in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.13506711409395974,
        "em_stderr": 0.003500303212340629,
        "f1": 0.2300922818791951,
        "f1_stderr": 0.0037045596658242686,
        "acc": 0.40860790594390883,
        "acc_stderr": 0.00958974246754918
    },
    "harness|drop|3": {
        "em": 0.13506711409395974,
        "em_stderr": 0.003500303212340629,
        "f1": 0.2300922818791951,
        "f1_stderr": 0.0037045596658242686
    },
    "harness|gsm8k|5": {
        "acc": 0.06899166034874905,
        "acc_stderr": 0.00698099583483858
    },
    "harness|winogrande|5": {
        "acc": 0.7482241515390686,
        "acc_stderr": 0.012198489100259781
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
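As a complement to the per-task loading example above, the aggregated metrics live in the "results" configuration, with the "latest" split pointing at the most recent run. A minimal sketch of reading it with the same library follows; the exact column layout of the results table is not documented in this card, so the snippet only prints whatever columns are present.

```python
from datasets import load_dataset

# Load the aggregated results; "latest" always points to the most recent run.
results = load_dataset(
    "open-llm-leaderboard/details_YeungNLP__firefly-ziya-13b",
    "results",
    split="latest",
)

# The column layout is not documented here, so just inspect what is available.
print(results.column_names)
print(results[0])
```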
open-llm-leaderboard/details_YeungNLP__firefly-ziya-13b
[ "region:us" ]
2023-08-18T10:02:10+00:00
{"pretty_name": "Evaluation run of YeungNLP/firefly-ziya-13b", "dataset_summary": "Dataset automatically created during the evaluation run of model [YeungNLP/firefly-ziya-13b](https://huggingface.co/YeungNLP/firefly-ziya-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_YeungNLP__firefly-ziya-13b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-15T21:35:49.973615](https://huggingface.co/datasets/open-llm-leaderboard/details_YeungNLP__firefly-ziya-13b/blob/main/results_2023-10-15T21-35-49.973615.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.13506711409395974,\n \"em_stderr\": 0.003500303212340629,\n \"f1\": 0.2300922818791951,\n \"f1_stderr\": 0.0037045596658242686,\n \"acc\": 0.40860790594390883,\n \"acc_stderr\": 0.00958974246754918\n },\n \"harness|drop|3\": {\n \"em\": 0.13506711409395974,\n \"em_stderr\": 0.003500303212340629,\n \"f1\": 0.2300922818791951,\n \"f1_stderr\": 0.0037045596658242686\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.06899166034874905,\n \"acc_stderr\": 0.00698099583483858\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7482241515390686,\n \"acc_stderr\": 0.012198489100259781\n }\n}\n```", "repo_url": "https://huggingface.co/YeungNLP/firefly-ziya-13b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|arc:challenge|25_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_15T21_35_49.973615", "path": ["**/details_harness|drop|3_2023-10-15T21-35-49.973615.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-15T21-35-49.973615.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_15T21_35_49.973615", "path": ["**/details_harness|gsm8k|5_2023-10-15T21-35-49.973615.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-15T21-35-49.973615.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hellaswag|10_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": 
"2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T13:25:19.592477.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T13:25:19.592477.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-24T13:25:19.592477.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-24T13:25:19.592477.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-24T13:25:19.592477.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_15T21_35_49.973615", "path": ["**/details_harness|winogrande|5_2023-10-15T21-35-49.973615.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-15T21-35-49.973615.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_24T13_25_19.592477", "path": ["results_2023-07-24T13:25:19.592477.parquet"]}, {"split": "2023_10_15T21_35_49.973615", "path": ["results_2023-10-15T21-35-49.973615.parquet"]}, {"split": "latest", "path": ["results_2023-10-15T21-35-49.973615.parquet"]}]}]}
2023-10-15T20:36:01+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of YeungNLP/firefly-ziya-13b

## Dataset Description

- Homepage:
- Repository: URL
- Paper:
- Leaderboard: URL
- Point of Contact: clementine@URL

### Dataset Summary

Dataset automatically created during the evaluation run of model YeungNLP/firefly-ziya-13b on the Open LLM Leaderboard.

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following:

## Latest results

These are the latest results from run 2023-10-15T21:35:49.973615 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each one in the results and the "latest" split for each eval):

### Supported Tasks and Leaderboards

### Languages

## Dataset Structure

### Data Instances

### Data Fields

### Data Splits

## Dataset Creation

### Curation Rationale

### Source Data

#### Initial Data Collection and Normalization

#### Who are the source language producers?

### Annotations

#### Annotation process

#### Who are the annotators?

### Personal and Sensitive Information

## Considerations for Using the Data

### Social Impact of Dataset

### Discussion of Biases

### Other Known Limitations

## Additional Information

### Dataset Curators

### Licensing Information

### Contributions
[ "# Dataset Card for Evaluation run of YeungNLP/firefly-ziya-13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model YeungNLP/firefly-ziya-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T21:35:49.973615(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of YeungNLP/firefly-ziya-13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model YeungNLP/firefly-ziya-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T21:35:49.973615(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 20, 31, 168, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of YeungNLP/firefly-ziya-13b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model YeungNLP/firefly-ziya-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-15T21:35:49.973615(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
3049f261d22df50f1f83fde11e6a003159241347
# Dataset Card for Evaluation run of YeungNLP/firefly-llama2-13b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/YeungNLP/firefly-llama2-13b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [YeungNLP/firefly-llama2-13b](https://huggingface.co/YeungNLP/firefly-llama2-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_YeungNLP__firefly-llama2-13b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-15T14:17:14.542868](https://huggingface.co/datasets/open-llm-leaderboard/details_YeungNLP__firefly-llama2-13b/blob/main/results_2023-10-15T14-17-14.542868.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.24842701342281878, "em_stderr": 0.004425115813837493, "f1": 0.31688024328859166, "f1_stderr": 0.0043894948502710114, "acc": 0.4294259182023645, "acc_stderr": 0.010458748786238002 }, "harness|drop|3": { "em": 0.24842701342281878, "em_stderr": 0.004425115813837493, "f1": 0.31688024328859166, "f1_stderr": 0.0043894948502710114 }, "harness|gsm8k|5": { "acc": 0.11220621683093253, "acc_stderr": 0.008693743138242376 }, "harness|winogrande|5": { "acc": 0.7466456195737964, "acc_stderr": 0.012223754434233626 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_YeungNLP__firefly-llama2-13b
[ "region:us" ]
2023-08-18T10:02:19+00:00
{"pretty_name": "Evaluation run of YeungNLP/firefly-llama2-13b", "dataset_summary": "Dataset automatically created during the evaluation run of model [YeungNLP/firefly-llama2-13b](https://huggingface.co/YeungNLP/firefly-llama2-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_YeungNLP__firefly-llama2-13b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-15T14:17:14.542868](https://huggingface.co/datasets/open-llm-leaderboard/details_YeungNLP__firefly-llama2-13b/blob/main/results_2023-10-15T14-17-14.542868.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.24842701342281878,\n \"em_stderr\": 0.004425115813837493,\n \"f1\": 0.31688024328859166,\n \"f1_stderr\": 0.0043894948502710114,\n \"acc\": 0.4294259182023645,\n \"acc_stderr\": 0.010458748786238002\n },\n \"harness|drop|3\": {\n \"em\": 0.24842701342281878,\n \"em_stderr\": 0.004425115813837493,\n \"f1\": 0.31688024328859166,\n \"f1_stderr\": 0.0043894948502710114\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.11220621683093253,\n \"acc_stderr\": 0.008693743138242376\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7466456195737964,\n \"acc_stderr\": 0.012223754434233626\n }\n}\n```", "repo_url": "https://huggingface.co/YeungNLP/firefly-llama2-13b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|arc:challenge|25_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_15T14_17_14.542868", "path": ["**/details_harness|drop|3_2023-10-15T14-17-14.542868.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-15T14-17-14.542868.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_15T14_17_14.542868", "path": ["**/details_harness|gsm8k|5_2023-10-15T14-17-14.542868.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-15T14-17-14.542868.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hellaswag|10_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", 
"data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-26T14:58:55.514354.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-26T14:58:55.514354.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-26T14:58:55.514354.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-26T14:58:55.514354.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-26T14:58:55.514354.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_15T14_17_14.542868", "path": ["**/details_harness|winogrande|5_2023-10-15T14-17-14.542868.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-15T14-17-14.542868.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_26T14_58_55.514354", "path": ["results_2023-07-26T14:58:55.514354.parquet"]}, {"split": "2023_10_15T14_17_14.542868", "path": ["results_2023-10-15T14-17-14.542868.parquet"]}, {"split": "latest", "path": ["results_2023-10-15T14-17-14.542868.parquet"]}]}]}
2023-10-15T13:17:26+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of YeungNLP/firefly-llama2-13b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model YeungNLP/firefly-llama2-13b on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-15T14:17:14.542868 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of YeungNLP/firefly-llama2-13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model YeungNLP/firefly-llama2-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T14:17:14.542868(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of YeungNLP/firefly-llama2-13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model YeungNLP/firefly-llama2-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T14:17:14.542868(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 22, 31, 170, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of YeungNLP/firefly-llama2-13b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model YeungNLP/firefly-llama2-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-15T14:17:14.542868(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
d0db503613ee1c4f7cbbf8e0e275ce82d3bf6ecc
# Dataset Card for Evaluation run of YeungNLP/firefly-llama-30b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/YeungNLP/firefly-llama-30b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [YeungNLP/firefly-llama-30b](https://huggingface.co/YeungNLP/firefly-llama-30b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_YeungNLP__firefly-llama-30b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-16T19:25:26.382420](https://huggingface.co/datasets/open-llm-leaderboard/details_YeungNLP__firefly-llama-30b/blob/main/results_2023-10-16T19-25-26.382420.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.3185822147651007, "em_stderr": 0.004771524379931377, "f1": 0.39832948825503617, "f1_stderr": 0.004636207519012633, "acc": 0.46636165136703384, "acc_stderr": 0.010904050525570757 }, "harness|drop|3": { "em": 0.3185822147651007, "em_stderr": 0.004771524379931377, "f1": 0.39832948825503617, "f1_stderr": 0.004636207519012633 }, "harness|gsm8k|5": { "acc": 0.15845337376800606, "acc_stderr": 0.010058474790238966 }, "harness|winogrande|5": { "acc": 0.7742699289660616, "acc_stderr": 0.011749626260902549 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_YeungNLP__firefly-llama-30b
[ "region:us" ]
2023-08-18T10:02:27+00:00
{"pretty_name": "Evaluation run of YeungNLP/firefly-llama-30b", "dataset_summary": "Dataset automatically created during the evaluation run of model [YeungNLP/firefly-llama-30b](https://huggingface.co/YeungNLP/firefly-llama-30b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_YeungNLP__firefly-llama-30b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-16T19:25:26.382420](https://huggingface.co/datasets/open-llm-leaderboard/details_YeungNLP__firefly-llama-30b/blob/main/results_2023-10-16T19-25-26.382420.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.3185822147651007,\n \"em_stderr\": 0.004771524379931377,\n \"f1\": 0.39832948825503617,\n \"f1_stderr\": 0.004636207519012633,\n \"acc\": 0.46636165136703384,\n \"acc_stderr\": 0.010904050525570757\n },\n \"harness|drop|3\": {\n \"em\": 0.3185822147651007,\n \"em_stderr\": 0.004771524379931377,\n \"f1\": 0.39832948825503617,\n \"f1_stderr\": 0.004636207519012633\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.15845337376800606,\n \"acc_stderr\": 0.010058474790238966\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7742699289660616,\n \"acc_stderr\": 0.011749626260902549\n }\n}\n```", "repo_url": "https://huggingface.co/YeungNLP/firefly-llama-30b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|arc:challenge|25_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_16T19_25_26.382420", "path": ["**/details_harness|drop|3_2023-10-16T19-25-26.382420.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-16T19-25-26.382420.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_16T19_25_26.382420", "path": ["**/details_harness|gsm8k|5_2023-10-16T19-25-26.382420.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-16T19-25-26.382420.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hellaswag|10_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": 
[{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-10T11:37:52.029669.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-10T11:37:52.029669.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-10T11:37:52.029669.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-10T11:37:52.029669.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-10T11:37:52.029669.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_16T19_25_26.382420", "path": ["**/details_harness|winogrande|5_2023-10-16T19-25-26.382420.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-16T19-25-26.382420.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_10T11_37_52.029669", "path": ["results_2023-08-10T11:37:52.029669.parquet"]}, {"split": "2023_10_16T19_25_26.382420", "path": ["results_2023-10-16T19-25-26.382420.parquet"]}, {"split": "latest", "path": ["results_2023-10-16T19-25-26.382420.parquet"]}]}]}
2023-10-16T18:25:38+00:00
[]
[]
TAGS
#region-us
# Dataset Card for Evaluation run of YeungNLP/firefly-llama-30b

## Dataset Description

- Homepage:
- Repository: URL
- Paper:
- Leaderboard: URL
- Point of Contact: clementine@URL

### Dataset Summary

Dataset automatically created during the evaluation run of model YeungNLP/firefly-llama-30b on the Open LLM Leaderboard.

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following:

## Latest results

These are the latest results from run 2023-10-16T19:25:26.382420 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

### Supported Tasks and Leaderboards

### Languages

## Dataset Structure

### Data Instances

### Data Fields

### Data Splits

## Dataset Creation

### Curation Rationale

### Source Data

#### Initial Data Collection and Normalization

#### Who are the source language producers?

### Annotations

#### Annotation process

#### Who are the annotators?

### Personal and Sensitive Information

## Considerations for Using the Data

### Social Impact of Dataset

### Discussion of Biases

### Other Known Limitations

## Additional Information

### Dataset Curators

### Licensing Information

### Contributions
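The loading snippet referenced in the summary above does not appear in this processed copy of the card; a minimal sketch of what such a load typically looks like, using the repository id and the config/split names listed in this record's metadata (the choice of `harness_gsm8k_5` here is just one of the 64 listed configurations):

```python
from datasets import get_dataset_config_names, load_dataset

# List the available configurations (one per evaluated task);
# the card above reports 64 of them for this repository.
configs = get_dataset_config_names("open-llm-leaderboard/details_YeungNLP__firefly-llama-30b")
print(len(configs))

# Load one run's details, e.g. the 5-shot GSM8K harness results,
# taking the "latest" split (the most recent evaluation run).
data = load_dataset(
    "open-llm-leaderboard/details_YeungNLP__firefly-llama-30b",
    "harness_gsm8k_5",
    split="latest",
)
```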
[ "# Dataset Card for Evaluation run of YeungNLP/firefly-llama-30b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model YeungNLP/firefly-llama-30b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-16T19:25:26.382420(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of YeungNLP/firefly-llama-30b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model YeungNLP/firefly-llama-30b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-16T19:25:26.382420(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 21, 31, 169, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of YeungNLP/firefly-llama-30b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model YeungNLP/firefly-llama-30b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-16T19:25:26.382420(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
d8e5684e648c2b7d6f1f3534a2bb6ae295a57cf8
# Dataset Card for Evaluation run of YeungNLP/firefly-llama2-13b-v1.2

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/YeungNLP/firefly-llama2-13b-v1.2
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [YeungNLP/firefly-llama2-13b-v1.2](https://huggingface.co/YeungNLP/firefly-llama2-13b-v1.2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_YeungNLP__firefly-llama2-13b-v1.2",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-09-16T22:16:40.042920](https://huggingface.co/datasets/open-llm-leaderboard/details_YeungNLP__firefly-llama2-13b-v1.2/blob/main/results_2023-09-16T22-16-40.042920.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.1929530201342282,
        "em_stderr": 0.004041241925899649,
        "f1": 0.28937080536912874,
        "f1_stderr": 0.004092108997164026,
        "acc": 0.43286870958302937,
        "acc_stderr": 0.010534410178374885
    },
    "harness|drop|3": {
        "em": 0.1929530201342282,
        "em_stderr": 0.004041241925899649,
        "f1": 0.28937080536912874,
        "f1_stderr": 0.004092108997164026
    },
    "harness|gsm8k|5": {
        "acc": 0.11751326762699014,
        "acc_stderr": 0.008870331256489991
    },
    "harness|winogrande|5": {
        "acc": 0.7482241515390686,
        "acc_stderr": 0.01219848910025978
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
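Beyond the per-task snippet above, the aggregated "results" configuration mentioned in the summary can be loaded the same way; a minimal sketch, assuming the same "latest" split convention shown for the other configurations (the exact column layout of the parquet files is not documented here and should be inspected):

```python
from datasets import load_dataset

# Load the aggregated "results" configuration; the "latest" split points to
# the most recent evaluation run for this model.
results = load_dataset(
    "open-llm-leaderboard/details_YeungNLP__firefly-llama2-13b-v1.2",
    "results",
    split="latest",
)
print(results)  # inspect the schema before relying on specific columns
```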
open-llm-leaderboard/details_YeungNLP__firefly-llama2-13b-v1.2
[ "region:us" ]
2023-08-18T10:02:36+00:00
{"pretty_name": "Evaluation run of YeungNLP/firefly-llama2-13b-v1.2", "dataset_summary": "Dataset automatically created during the evaluation run of model [YeungNLP/firefly-llama2-13b-v1.2](https://huggingface.co/YeungNLP/firefly-llama2-13b-v1.2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_YeungNLP__firefly-llama2-13b-v1.2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-09-16T22:16:40.042920](https://huggingface.co/datasets/open-llm-leaderboard/details_YeungNLP__firefly-llama2-13b-v1.2/blob/main/results_2023-09-16T22-16-40.042920.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.1929530201342282,\n \"em_stderr\": 0.004041241925899649,\n \"f1\": 0.28937080536912874,\n \"f1_stderr\": 0.004092108997164026,\n \"acc\": 0.43286870958302937,\n \"acc_stderr\": 0.010534410178374885\n },\n \"harness|drop|3\": {\n \"em\": 0.1929530201342282,\n \"em_stderr\": 0.004041241925899649,\n \"f1\": 0.28937080536912874,\n \"f1_stderr\": 0.004092108997164026\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.11751326762699014,\n \"acc_stderr\": 0.008870331256489991\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7482241515390686,\n \"acc_stderr\": 0.01219848910025978\n }\n}\n```", "repo_url": "https://huggingface.co/YeungNLP/firefly-llama2-13b-v1.2", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|arc:challenge|25_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_16T22_16_40.042920", "path": ["**/details_harness|drop|3_2023-09-16T22-16-40.042920.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-16T22-16-40.042920.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_16T22_16_40.042920", "path": ["**/details_harness|gsm8k|5_2023-09-16T22-16-40.042920.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-09-16T22-16-40.042920.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hellaswag|10_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T12:19:01.767647.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T12:19:01.767647.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-09T12:19:01.767647.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-09T12:19:01.767647.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-09T12:19:01.767647.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_16T22_16_40.042920", "path": ["**/details_harness|winogrande|5_2023-09-16T22-16-40.042920.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-16T22-16-40.042920.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_09T12_19_01.767647", "path": ["results_2023-08-09T12:19:01.767647.parquet"]}, {"split": "2023_09_16T22_16_40.042920", "path": ["results_2023-09-16T22-16-40.042920.parquet"]}, {"split": "latest", "path": ["results_2023-09-16T22-16-40.042920.parquet"]}]}]}
2023-09-16T21:16:52+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of YeungNLP/firefly-llama2-13b-v1.2 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model YeungNLP/firefly-llama2-13b-v1.2 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-09-16T22:16:40.042920 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
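The summary above refers to a loading snippet that was stripped out of this flattened view. Below is a minimal sketch of what that call looks like; the repo id is an assumption inferred from the usual open-llm-leaderboard/details_<org>__<model> naming, while the config and split names are taken from the metadata listed for this record.

```python
from datasets import load_dataset

# Repo id inferred from the "details_<org>__<model>" naming convention (assumption,
# not quoted from this record); config and split names come from the record metadata.
data = load_dataset(
    "open-llm-leaderboard/details_YeungNLP__firefly-llama2-13b-v1.2",
    "harness_winogrande_5",  # one of the 64 per-task configurations
    split="latest",          # or a timestamped split such as "2023_09_16T22_16_40.042920"
)
print(data)
```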
[ "# Dataset Card for Evaluation run of YeungNLP/firefly-llama2-13b-v1.2", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model YeungNLP/firefly-llama2-13b-v1.2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-16T22:16:40.042920(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of YeungNLP/firefly-llama2-13b-v1.2", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model YeungNLP/firefly-llama2-13b-v1.2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-16T22:16:40.042920(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 25, 31, 173, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of YeungNLP/firefly-llama2-13b-v1.2## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model YeungNLP/firefly-llama2-13b-v1.2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-09-16T22:16:40.042920(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
03026a98fe70c06d1a20a60d76dc33c7601fcb89
# Dataset Card for Evaluation run of YeungNLP/firefly-llama-13b-v1.2 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/YeungNLP/firefly-llama-13b-v1.2 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [YeungNLP/firefly-llama-13b-v1.2](https://huggingface.co/YeungNLP/firefly-llama-13b-v1.2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_YeungNLP__firefly-llama-13b-v1.2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-09-23T00:40:07.010521](https://huggingface.co/datasets/open-llm-leaderboard/details_YeungNLP__firefly-llama-13b-v1.2/blob/main/results_2023-09-23T00-40-07.010521.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval): ```python { "all": { "em": 0.15614513422818793, "em_stderr": 0.0037173868511430597, "f1": 0.2505851510067118, "f1_stderr": 0.0038733830582179693, "acc": 0.4198188936752808, "acc_stderr": 0.009751908892731628 }, "harness|drop|3": { "em": 0.15614513422818793, "em_stderr": 0.0037173868511430597, "f1": 0.2505851510067118, "f1_stderr": 0.0038733830582179693 }, "harness|gsm8k|5": { "acc": 0.0803639120545868, "acc_stderr": 0.007488258573239077 }, "harness|winogrande|5": { "acc": 0.7592738752959748, "acc_stderr": 0.01201555921222418 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
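As a short follow-up to the snippet in the card above: the aggregated metrics quoted under "Latest results" can also be pulled programmatically from the "results" configuration, whose "latest" split always points at the most recent run. A minimal sketch follows; the exact column layout of the results parquet is not documented here, so the final print is only for inspection.

```python
from datasets import load_dataset

# The "results" config aggregates every run; "latest" resolves to the most
# recent results file (results_2023-09-23T00-40-07.010521 for this repo).
results = load_dataset(
    "open-llm-leaderboard/details_YeungNLP__firefly-llama-13b-v1.2",
    "results",
    split="latest",
)
print(results)            # one row per aggregated results file
print(results[0].keys())  # inspect the exported columns rather than assuming them
```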
open-llm-leaderboard/details_YeungNLP__firefly-llama-13b-v1.2
[ "region:us" ]
2023-08-18T10:02:44+00:00
{"pretty_name": "Evaluation run of YeungNLP/firefly-llama-13b-v1.2", "dataset_summary": "Dataset automatically created during the evaluation run of model [YeungNLP/firefly-llama-13b-v1.2](https://huggingface.co/YeungNLP/firefly-llama-13b-v1.2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_YeungNLP__firefly-llama-13b-v1.2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-09-23T00:40:07.010521](https://huggingface.co/datasets/open-llm-leaderboard/details_YeungNLP__firefly-llama-13b-v1.2/blob/main/results_2023-09-23T00-40-07.010521.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.15614513422818793,\n \"em_stderr\": 0.0037173868511430597,\n \"f1\": 0.2505851510067118,\n \"f1_stderr\": 0.0038733830582179693,\n \"acc\": 0.4198188936752808,\n \"acc_stderr\": 0.009751908892731628\n },\n \"harness|drop|3\": {\n \"em\": 0.15614513422818793,\n \"em_stderr\": 0.0037173868511430597,\n \"f1\": 0.2505851510067118,\n \"f1_stderr\": 0.0038733830582179693\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0803639120545868,\n \"acc_stderr\": 0.007488258573239077\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7592738752959748,\n \"acc_stderr\": 0.01201555921222418\n }\n}\n```", "repo_url": "https://huggingface.co/YeungNLP/firefly-llama-13b-v1.2", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|arc:challenge|25_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_23T00_40_07.010521", "path": ["**/details_harness|drop|3_2023-09-23T00-40-07.010521.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-23T00-40-07.010521.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_23T00_40_07.010521", "path": ["**/details_harness|gsm8k|5_2023-09-23T00-40-07.010521.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-09-23T00-40-07.010521.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hellaswag|10_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T14:08:16.111651.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T14:08:16.111651.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-24T14:08:16.111651.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-24T14:08:16.111651.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-24T14:08:16.111651.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_23T00_40_07.010521", "path": ["**/details_harness|winogrande|5_2023-09-23T00-40-07.010521.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-23T00-40-07.010521.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_24T14_08_16.111651", "path": ["results_2023-07-24T14:08:16.111651.parquet"]}, {"split": "2023_09_23T00_40_07.010521", "path": ["results_2023-09-23T00-40-07.010521.parquet"]}, {"split": "latest", "path": ["results_2023-09-23T00-40-07.010521.parquet"]}]}]}
2023-09-22T23:40:19+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of YeungNLP/firefly-llama-13b-v1.2 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model YeungNLP/firefly-llama-13b-v1.2 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-09-23T00:40:07.010521 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and in the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of YeungNLP/firefly-llama-13b-v1.2", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model YeungNLP/firefly-llama-13b-v1.2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-23T00:40:07.010521(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of YeungNLP/firefly-llama-13b-v1.2", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model YeungNLP/firefly-llama-13b-v1.2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-23T00:40:07.010521(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 24, 31, 172, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of YeungNLP/firefly-llama-13b-v1.2## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model YeungNLP/firefly-llama-13b-v1.2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-09-23T00:40:07.010521(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
f57ca4ea39d314974ab35e11d723e9653e3b6671
# Dataset Card for Evaluation run of YeungNLP/firefly-llama-13b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/YeungNLP/firefly-llama-13b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [YeungNLP/firefly-llama-13b](https://huggingface.co/YeungNLP/firefly-llama-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_YeungNLP__firefly-llama-13b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-15T14:09:32.562166](https://huggingface.co/datasets/open-llm-leaderboard/details_YeungNLP__firefly-llama-13b/blob/main/results_2023-10-15T14-09-32.562166.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and in the "latest" split for each eval): ```python { "all": { "em": 0.12720218120805368, "em_stderr": 0.003412272897129365, "f1": 0.22689702181208146, "f1_stderr": 0.003654793246945971, "acc": 0.418998511823731, "acc_stderr": 0.009810630903312568 }, "harness|drop|3": { "em": 0.12720218120805368, "em_stderr": 0.003412272897129365, "f1": 0.22689702181208146, "f1_stderr": 0.003654793246945971 }, "harness|gsm8k|5": { "acc": 0.08188021228203184, "acc_stderr": 0.0075523385277169374 }, "harness|winogrande|5": { "acc": 0.7561168113654302, "acc_stderr": 0.012068923278908197 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_YeungNLP__firefly-llama-13b
[ "region:us" ]
2023-08-18T10:02:53+00:00
{"pretty_name": "Evaluation run of YeungNLP/firefly-llama-13b", "dataset_summary": "Dataset automatically created during the evaluation run of model [YeungNLP/firefly-llama-13b](https://huggingface.co/YeungNLP/firefly-llama-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_YeungNLP__firefly-llama-13b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-15T14:09:32.562166](https://huggingface.co/datasets/open-llm-leaderboard/details_YeungNLP__firefly-llama-13b/blob/main/results_2023-10-15T14-09-32.562166.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.12720218120805368,\n \"em_stderr\": 0.003412272897129365,\n \"f1\": 0.22689702181208146,\n \"f1_stderr\": 0.003654793246945971,\n \"acc\": 0.418998511823731,\n \"acc_stderr\": 0.009810630903312568\n },\n \"harness|drop|3\": {\n \"em\": 0.12720218120805368,\n \"em_stderr\": 0.003412272897129365,\n \"f1\": 0.22689702181208146,\n \"f1_stderr\": 0.003654793246945971\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.08188021228203184,\n \"acc_stderr\": 0.0075523385277169374\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7561168113654302,\n \"acc_stderr\": 0.012068923278908197\n }\n}\n```", "repo_url": "https://huggingface.co/YeungNLP/firefly-llama-13b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|arc:challenge|25_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_15T14_09_32.562166", "path": ["**/details_harness|drop|3_2023-10-15T14-09-32.562166.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-15T14-09-32.562166.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_15T14_09_32.562166", "path": ["**/details_harness|gsm8k|5_2023-10-15T14-09-32.562166.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-15T14-09-32.562166.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hellaswag|10_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": 
[{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:51:43.691477.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:51:43.691477.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T18:51:43.691477.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T18:51:43.691477.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T18:51:43.691477.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_15T14_09_32.562166", "path": ["**/details_harness|winogrande|5_2023-10-15T14-09-32.562166.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-15T14-09-32.562166.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T18_51_43.691477", "path": ["results_2023-07-19T18:51:43.691477.parquet"]}, {"split": "2023_10_15T14_09_32.562166", "path": ["results_2023-10-15T14-09-32.562166.parquet"]}, {"split": "latest", "path": ["results_2023-10-15T14-09-32.562166.parquet"]}]}]}
2023-10-15T13:09:44+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of YeungNLP/firefly-llama-13b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model YeungNLP/firefly-llama-13b on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-15T14:09:32.562166 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and in the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of YeungNLP/firefly-llama-13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model YeungNLP/firefly-llama-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T14:09:32.562166(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of YeungNLP/firefly-llama-13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model YeungNLP/firefly-llama-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T14:09:32.562166(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 21, 31, 169, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of YeungNLP/firefly-llama-13b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model YeungNLP/firefly-llama-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-15T14:09:32.562166(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
0bb67d825aa39835f62913e852adb1a43e5b8a6e
# Dataset Card for Evaluation run of ehartford/WizardLM-7B-Uncensored ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/ehartford/WizardLM-7B-Uncensored - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [ehartford/WizardLM-7B-Uncensored](https://huggingface.co/ehartford/WizardLM-7B-Uncensored) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_ehartford__WizardLM-7B-Uncensored", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-18T17:40:08.208255](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__WizardLM-7B-Uncensored/blob/main/results_2023-10-18T17-40-08.208255.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and in the "latest" split for each eval): ```python { "all": { "em": 0.15855704697986578, "em_stderr": 0.003740630102537942, "f1": 0.23759018456375866, "f1_stderr": 0.0038782887858402016, "acc": 0.35844703091780444, "acc_stderr": 0.008977381882470187 }, "harness|drop|3": { "em": 0.15855704697986578, "em_stderr": 0.003740630102537942, "f1": 0.23759018456375866, "f1_stderr": 0.0038782887858402016 }, "harness|gsm8k|5": { "acc": 0.032600454890068235, "acc_stderr": 0.004891669021939563 }, "harness|winogrande|5": { "acc": 0.6842936069455406, "acc_stderr": 0.01306309474300081 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_ehartford__WizardLM-7B-Uncensored
[ "region:us" ]
2023-08-18T10:03:01+00:00
{"pretty_name": "Evaluation run of ehartford/WizardLM-7B-Uncensored", "dataset_summary": "Dataset automatically created during the evaluation run of model [ehartford/WizardLM-7B-Uncensored](https://huggingface.co/ehartford/WizardLM-7B-Uncensored) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ehartford__WizardLM-7B-Uncensored\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-18T17:40:08.208255](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__WizardLM-7B-Uncensored/blob/main/results_2023-10-18T17-40-08.208255.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.15855704697986578,\n \"em_stderr\": 0.003740630102537942,\n \"f1\": 0.23759018456375866,\n \"f1_stderr\": 0.0038782887858402016,\n \"acc\": 0.35844703091780444,\n \"acc_stderr\": 0.008977381882470187\n },\n \"harness|drop|3\": {\n \"em\": 0.15855704697986578,\n \"em_stderr\": 0.003740630102537942,\n \"f1\": 0.23759018456375866,\n \"f1_stderr\": 0.0038782887858402016\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.032600454890068235,\n \"acc_stderr\": 0.004891669021939563\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6842936069455406,\n \"acc_stderr\": 0.01306309474300081\n }\n}\n```", "repo_url": "https://huggingface.co/ehartford/WizardLM-7B-Uncensored", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|arc:challenge|25_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_18T17_40_08.208255", "path": ["**/details_harness|drop|3_2023-10-18T17-40-08.208255.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-18T17-40-08.208255.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_18T17_40_08.208255", "path": ["**/details_harness|gsm8k|5_2023-10-18T17-40-08.208255.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-18T17-40-08.208255.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hellaswag|10_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T16:58:31.980929.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T16:58:31.980929.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T16:58:31.980929.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T16:58:31.980929.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T16:58:31.980929.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_18T17_40_08.208255", "path": ["**/details_harness|winogrande|5_2023-10-18T17-40-08.208255.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-18T17-40-08.208255.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T16_58_31.980929", "path": ["results_2023-07-19T16:58:31.980929.parquet"]}, {"split": "2023_10_18T17_40_08.208255", "path": ["results_2023-10-18T17-40-08.208255.parquet"]}, {"split": "latest", "path": ["results_2023-10-18T17-40-08.208255.parquet"]}]}]}
2023-10-18T16:40:21+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of ehartford/WizardLM-7B-Uncensored ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model ehartford/WizardLM-7B-Uncensored on the Open LLM Leaderboard. The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-18T17:40:08.208255(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of ehartford/WizardLM-7B-Uncensored", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/WizardLM-7B-Uncensored on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-18T17:40:08.208255(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of ehartford/WizardLM-7B-Uncensored", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/WizardLM-7B-Uncensored on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-18T17:40:08.208255(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 23, 31, 171, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of ehartford/WizardLM-7B-Uncensored## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/WizardLM-7B-Uncensored on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-18T17:40:08.208255(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
68f9aa748afb386168a9a1446749d4d023b0983c
# Dataset Card for Evaluation run of ehartford/WizardLM-13B-Uncensored

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/ehartford/WizardLM-13B-Uncensored
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [ehartford/WizardLM-13B-Uncensored](https://huggingface.co/ehartford/WizardLM-13B-Uncensored) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_ehartford__WizardLM-13B-Uncensored",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-18T07:53:55.275923](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__WizardLM-13B-Uncensored/blob/main/results_2023-10-18T07-53-55.275923.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.20994127516778524,
        "em_stderr": 0.004170789326061059,
        "f1": 0.3040310402684571,
        "f1_stderr": 0.004210803460550511,
        "acc": 0.3630369207736123,
        "acc_stderr": 0.00835492026013406
    },
    "harness|drop|3": {
        "em": 0.20994127516778524,
        "em_stderr": 0.004170789326061059,
        "f1": 0.3040310402684571,
        "f1_stderr": 0.004210803460550511
    },
    "harness|gsm8k|5": {
        "acc": 0.02047005307050796,
        "acc_stderr": 0.0039004133859157192
    },
    "harness|winogrande|5": {
        "acc": 0.7056037884767167,
        "acc_stderr": 0.0128094271343524
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
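As with the other runs in this collection, the aggregated scores can be pulled directly. The sketch below is a minimal example and assumes this repository follows the same `results` configuration and `latest` split convention as the other leaderboard detail repositories:

```python
from datasets import load_dataset

# Aggregated metrics for the latest run; the "results" config and "latest"
# split names are assumed to follow the convention of the other detail repos.
results = load_dataset(
    "open-llm-leaderboard/details_ehartford__WizardLM-13B-Uncensored",
    "results",
    split="latest",
)
print(results[0])
```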
open-llm-leaderboard/details_ehartford__WizardLM-13B-Uncensored
[ "region:us" ]
2023-08-18T10:03:10+00:00
{"pretty_name": "Evaluation run of ehartford/WizardLM-13B-Uncensored", "dataset_summary": "Dataset automatically created during the evaluation run of model [ehartford/WizardLM-13B-Uncensored](https://huggingface.co/ehartford/WizardLM-13B-Uncensored) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ehartford__WizardLM-13B-Uncensored\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-18T07:53:55.275923](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__WizardLM-13B-Uncensored/blob/main/results_2023-10-18T07-53-55.275923.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.20994127516778524,\n \"em_stderr\": 0.004170789326061059,\n \"f1\": 0.3040310402684571,\n \"f1_stderr\": 0.004210803460550511,\n \"acc\": 0.3630369207736123,\n \"acc_stderr\": 0.00835492026013406\n },\n \"harness|drop|3\": {\n \"em\": 0.20994127516778524,\n \"em_stderr\": 0.004170789326061059,\n \"f1\": 0.3040310402684571,\n \"f1_stderr\": 0.004210803460550511\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.02047005307050796,\n \"acc_stderr\": 0.0039004133859157192\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7056037884767167,\n \"acc_stderr\": 0.0128094271343524\n }\n}\n```", "repo_url": "https://huggingface.co/ehartford/WizardLM-13B-Uncensored", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|arc:challenge|25_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_18T07_53_55.275923", "path": ["**/details_harness|drop|3_2023-10-18T07-53-55.275923.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-18T07-53-55.275923.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_18T07_53_55.275923", "path": ["**/details_harness|gsm8k|5_2023-10-18T07-53-55.275923.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-18T07-53-55.275923.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hellaswag|10_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:00:32.745864.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:00:32.745864.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T19:00:32.745864.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T19:00:32.745864.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T19:00:32.745864.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_18T07_53_55.275923", "path": ["**/details_harness|winogrande|5_2023-10-18T07-53-55.275923.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-18T07-53-55.275923.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T19_00_32.745864", "path": ["results_2023-07-19T19:00:32.745864.parquet"]}, {"split": "2023_10_18T07_53_55.275923", "path": ["results_2023-10-18T07-53-55.275923.parquet"]}, {"split": "latest", "path": ["results_2023-10-18T07-53-55.275923.parquet"]}]}]}
2023-10-18T06:54:07+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of ehartford/WizardLM-13B-Uncensored ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model ehartford/WizardLM-13B-Uncensored on the Open LLM Leaderboard. The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-18T07:53:55.275923(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of ehartford/WizardLM-13B-Uncensored", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/WizardLM-13B-Uncensored on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-18T07:53:55.275923(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of ehartford/WizardLM-13B-Uncensored", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/WizardLM-13B-Uncensored on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-18T07:53:55.275923(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 23, 31, 171, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of ehartford/WizardLM-13B-Uncensored## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/WizardLM-13B-Uncensored on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-18T07:53:55.275923(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
8e9f5378207f733ef17aedb4f8ddc81e49c03069
# Dataset Card for Evaluation run of ehartford/WizardLM-1.0-Uncensored-Llama2-13b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/ehartford/WizardLM-1.0-Uncensored-Llama2-13b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [ehartford/WizardLM-1.0-Uncensored-Llama2-13b](https://huggingface.co/ehartford/WizardLM-1.0-Uncensored-Llama2-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_ehartford__WizardLM-1.0-Uncensored-Llama2-13b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-22T09:23:28.206908](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__WizardLM-1.0-Uncensored-Llama2-13b/blob/main/results_2023-10-22T09-23-28.206908.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.07403523489932885, "em_stderr": 0.0026813660805584437, "f1": 0.1393938758389259, "f1_stderr": 0.002927612388923708, "acc": 0.43689851379839195, "acc_stderr": 0.010827222471217795 }, "harness|drop|3": { "em": 0.07403523489932885, "em_stderr": 0.0026813660805584437, "f1": 0.1393938758389259, "f1_stderr": 0.002927612388923708 }, "harness|gsm8k|5": { "acc": 0.1326762699014405, "acc_stderr": 0.009343929131442217 }, "harness|winogrande|5": { "acc": 0.7411207576953434, "acc_stderr": 0.012310515810993372 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_ehartford__WizardLM-1.0-Uncensored-Llama2-13b
[ "region:us" ]
2023-08-18T10:03:18+00:00
{"pretty_name": "Evaluation run of ehartford/WizardLM-1.0-Uncensored-Llama2-13b", "dataset_summary": "Dataset automatically created during the evaluation run of model [ehartford/WizardLM-1.0-Uncensored-Llama2-13b](https://huggingface.co/ehartford/WizardLM-1.0-Uncensored-Llama2-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ehartford__WizardLM-1.0-Uncensored-Llama2-13b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-22T09:23:28.206908](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__WizardLM-1.0-Uncensored-Llama2-13b/blob/main/results_2023-10-22T09-23-28.206908.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.07403523489932885,\n \"em_stderr\": 0.0026813660805584437,\n \"f1\": 0.1393938758389259,\n \"f1_stderr\": 0.002927612388923708,\n \"acc\": 0.43689851379839195,\n \"acc_stderr\": 0.010827222471217795\n },\n \"harness|drop|3\": {\n \"em\": 0.07403523489932885,\n \"em_stderr\": 0.0026813660805584437,\n \"f1\": 0.1393938758389259,\n \"f1_stderr\": 0.002927612388923708\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.1326762699014405,\n \"acc_stderr\": 0.009343929131442217\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7411207576953434,\n \"acc_stderr\": 0.012310515810993372\n }\n}\n```", "repo_url": "https://huggingface.co/ehartford/WizardLM-1.0-Uncensored-Llama2-13b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|arc:challenge|25_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|arc:challenge|25_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_21T18_02_33.416249", "path": ["**/details_harness|drop|3_2023-10-21T18-02-33.416249.parquet"]}, {"split": "2023_10_22T09_23_28.206908", "path": ["**/details_harness|drop|3_2023-10-22T09-23-28.206908.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-22T09-23-28.206908.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_21T18_02_33.416249", "path": ["**/details_harness|gsm8k|5_2023-10-21T18-02-33.416249.parquet"]}, {"split": "2023_10_22T09_23_28.206908", "path": 
["**/details_harness|gsm8k|5_2023-10-22T09-23-28.206908.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-22T09-23-28.206908.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hellaswag|10_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hellaswag|10_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T13:52:58.129270.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-09T13:52:58.129270.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-09T13:58:22.615807.parquet", 
"**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T13:58:22.615807.parquet", 
"**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T13:58:22.615807.parquet", 
"**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-09T13:58:22.615807.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": 
["**/details_harness|hendrycksTest-astronomy|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": 
[{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": 
["**/details_harness|hendrycksTest-management|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-philosophy|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", 
"data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-09T13:58:22.615807.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-09T13:58:22.615807.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_21T18_02_33.416249", "path": ["**/details_harness|winogrande|5_2023-10-21T18-02-33.416249.parquet"]}, {"split": "2023_10_22T09_23_28.206908", "path": ["**/details_harness|winogrande|5_2023-10-22T09-23-28.206908.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-22T09-23-28.206908.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_09T13_52_58.129270", "path": ["results_2023-08-09T13:52:58.129270.parquet"]}, {"split": "2023_08_09T13_58_22.615807", "path": ["results_2023-08-09T13:58:22.615807.parquet"]}, {"split": "2023_10_21T18_02_33.416249", "path": ["results_2023-10-21T18-02-33.416249.parquet"]}, {"split": "2023_10_22T09_23_28.206908", "path": ["results_2023-10-22T09-23-28.206908.parquet"]}, {"split": "latest", "path": ["results_2023-10-22T09-23-28.206908.parquet"]}]}]}
2023-10-22T08:23:40+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of ehartford/WizardLM-1.0-Uncensored-Llama2-13b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model ehartford/WizardLM-1.0-Uncensored-Llama2-13b on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-22T09:23:28.206908 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
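The card text above says "you can for instance do the following", but the snippet itself is not reproduced in this flattened field. Below is a minimal, hedged sketch of what such a call could look like; the repository id is an assumption inferred from the leaderboard's usual `details_<org>__<model>` naming (it is not stated verbatim in this record), while the `harness_winogrande_5` config and `latest` split are taken from the metadata listed earlier in this record.

```python
# Hypothetical sketch, not part of the original card. The repo id below is an
# assumption based on the leaderboard naming convention; the config and split
# names come from the metadata listed earlier in this record.
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_ehartford__WizardLM-1.0-Uncensored-Llama2-13b",
    "harness_winogrande_5",
    split="latest",
)
print(data)
```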
[ "# Dataset Card for Evaluation run of ehartford/WizardLM-1.0-Uncensored-Llama2-13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/WizardLM-1.0-Uncensored-Llama2-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-22T09:23:28.206908(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of ehartford/WizardLM-1.0-Uncensored-Llama2-13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/WizardLM-1.0-Uncensored-Llama2-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-22T09:23:28.206908(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 29, 31, 177, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of ehartford/WizardLM-1.0-Uncensored-Llama2-13b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/WizardLM-1.0-Uncensored-Llama2-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-22T09:23:28.206908(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
288a5ebd2445a27c6d2594a8e5460c136f4ddfbb
# Dataset Card for Evaluation run of ehartford/Wizard-Vicuna-13B-Uncensored ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/ehartford/Wizard-Vicuna-13B-Uncensored - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [ehartford/Wizard-Vicuna-13B-Uncensored](https://huggingface.co/ehartford/Wizard-Vicuna-13B-Uncensored) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_ehartford__Wizard-Vicuna-13B-Uncensored", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-18T17:43:02.527324](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__Wizard-Vicuna-13B-Uncensored/blob/main/results_2023-10-18T17-43-02.527324.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.14314177852348994, "em_stderr": 0.0035865537174832513, "f1": 0.2178586409395965, "f1_stderr": 0.003730334446277459, "acc": 0.4216675951562166, "acc_stderr": 0.00989785498376742 }, "harness|drop|3": { "em": 0.14314177852348994, "em_stderr": 0.0035865537174832513, "f1": 0.2178586409395965, "f1_stderr": 0.003730334446277459 }, "harness|gsm8k|5": { "acc": 0.08642911296436695, "acc_stderr": 0.0077400443371038056 }, "harness|winogrande|5": { "acc": 0.7569060773480663, "acc_stderr": 0.012055665630431032 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
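The card above shows how to load a single task configuration. As a small complementary sketch (not part of the original card), the 64 configurations mentioned in the summary can be enumerated with the `datasets` library; the repository id is taken verbatim from the card's own example.

```python
# Illustrative sketch: enumerate every configuration published in this
# evaluation-details repository (repo id copied from the card above).
from datasets import get_dataset_config_names

configs = get_dataset_config_names(
    "open-llm-leaderboard/details_ehartford__Wizard-Vicuna-13B-Uncensored"
)
print(len(configs))
print(configs[:5])
```

Each returned name corresponds to one of the `config_name` entries in this record's metadata (for example `harness_winogrande_5` or `results`).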
open-llm-leaderboard/details_ehartford__Wizard-Vicuna-13B-Uncensored
[ "region:us" ]
2023-08-18T10:03:35+00:00
{"pretty_name": "Evaluation run of ehartford/Wizard-Vicuna-13B-Uncensored", "dataset_summary": "Dataset automatically created during the evaluation run of model [ehartford/Wizard-Vicuna-13B-Uncensored](https://huggingface.co/ehartford/Wizard-Vicuna-13B-Uncensored) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ehartford__Wizard-Vicuna-13B-Uncensored\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-18T17:43:02.527324](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__Wizard-Vicuna-13B-Uncensored/blob/main/results_2023-10-18T17-43-02.527324.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.14314177852348994,\n \"em_stderr\": 0.0035865537174832513,\n \"f1\": 0.2178586409395965,\n \"f1_stderr\": 0.003730334446277459,\n \"acc\": 0.4216675951562166,\n \"acc_stderr\": 0.00989785498376742\n },\n \"harness|drop|3\": {\n \"em\": 0.14314177852348994,\n \"em_stderr\": 0.0035865537174832513,\n \"f1\": 0.2178586409395965,\n \"f1_stderr\": 0.003730334446277459\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.08642911296436695,\n \"acc_stderr\": 0.0077400443371038056\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7569060773480663,\n \"acc_stderr\": 0.012055665630431032\n }\n}\n```", "repo_url": "https://huggingface.co/ehartford/Wizard-Vicuna-13B-Uncensored", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|arc:challenge|25_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_18T17_43_02.527324", "path": ["**/details_harness|drop|3_2023-10-18T17-43-02.527324.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-18T17-43-02.527324.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_18T17_43_02.527324", "path": ["**/details_harness|gsm8k|5_2023-10-18T17-43-02.527324.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-18T17-43-02.527324.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hellaswag|10_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hellaswag|10_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:11:03.287932.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:11:03.287932.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:11:03.287932.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T19:11:03.287932.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": 
"2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T19:11:03.287932.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T19:11:03.287932.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_18T17_43_02.527324", "path": ["**/details_harness|winogrande|5_2023-10-18T17-43-02.527324.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-18T17-43-02.527324.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T19_11_03.287932", "path": ["results_2023-07-19T19:11:03.287932.parquet"]}, {"split": "2023_10_18T17_43_02.527324", "path": ["results_2023-10-18T17-43-02.527324.parquet"]}, {"split": "latest", "path": ["results_2023-10-18T17-43-02.527324.parquet"]}]}]}
2023-10-18T16:43:16+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of ehartford/Wizard-Vicuna-13B-Uncensored ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model ehartford/Wizard-Vicuna-13B-Uncensored on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-18T17:43:02.527324 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
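The loading call referenced in the card above would look roughly like the following sketch; the repo id is an assumption inferred from the details_<org>__<model> naming used by the other leaderboard repos here, and the config and "latest" split names come from this record's metadata.

```python
# Minimal sketch, not taken from the original card: load one task
# configuration of the 13B details repo. The repo id is assumed from the
# naming pattern of the other open-llm-leaderboard details repos; the
# config name and the "latest" split are declared in this record's metadata.
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_ehartford__Wizard-Vicuna-13B-Uncensored",
    "harness_winogrande_5",
    split="latest",
)
print(data)
```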
[ "# Dataset Card for Evaluation run of ehartford/Wizard-Vicuna-13B-Uncensored", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/Wizard-Vicuna-13B-Uncensored on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-18T17:43:02.527324(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of ehartford/Wizard-Vicuna-13B-Uncensored", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/Wizard-Vicuna-13B-Uncensored on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-18T17:43:02.527324(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 26, 31, 174, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of ehartford/Wizard-Vicuna-13B-Uncensored## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/Wizard-Vicuna-13B-Uncensored on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-18T17:43:02.527324(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
b9ad33c0a8ba895ab7de8dc2a0f7610bac0f2e47
# Dataset Card for Evaluation run of ehartford/Wizard-Vicuna-7B-Uncensored ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/ehartford/Wizard-Vicuna-7B-Uncensored - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [ehartford/Wizard-Vicuna-7B-Uncensored](https://huggingface.co/ehartford/Wizard-Vicuna-7B-Uncensored) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_ehartford__Wizard-Vicuna-7B-Uncensored", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-18T07:04:55.060331](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__Wizard-Vicuna-7B-Uncensored/blob/main/results_2023-10-18T07-04-55.060331.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.18036912751677853, "em_stderr": 0.003937584689736024, "f1": 0.23801803691275183, "f1_stderr": 0.003988701736112215, "acc": 0.3838336904677134, "acc_stderr": 0.009164287920296908 }, "harness|drop|3": { "em": 0.18036912751677853, "em_stderr": 0.003937584689736024, "f1": 0.23801803691275183, "f1_stderr": 0.003988701736112215 }, "harness|gsm8k|5": { "acc": 0.045489006823351025, "acc_stderr": 0.005739657656722215 }, "harness|winogrande|5": { "acc": 0.7221783741120757, "acc_stderr": 0.012588918183871601 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
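Beyond the per-task example in the card above, the aggregated "results" configuration can be loaded directly. The sketch below only assumes the configuration and split names declared in this repo's metadata (a "results" config with a "latest" split); the column layout of the underlying parquet file is not spelled out in the card and is left unspecified.

```python
# Minimal sketch: read the aggregated metrics of the newest run from the
# "results" configuration. Config name "results" and split "latest" are
# declared in the repo metadata above; the row/column layout is an assumption.
from datasets import load_dataset

results = load_dataset(
    "open-llm-leaderboard/details_ehartford__Wizard-Vicuna-7B-Uncensored",
    "results",
    split="latest",
)
print(results[0])  # aggregated metrics of the most recent evaluation run
```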
open-llm-leaderboard/details_ehartford__Wizard-Vicuna-7B-Uncensored
[ "region:us" ]
2023-08-18T10:03:44+00:00
{"pretty_name": "Evaluation run of ehartford/Wizard-Vicuna-7B-Uncensored", "dataset_summary": "Dataset automatically created during the evaluation run of model [ehartford/Wizard-Vicuna-7B-Uncensored](https://huggingface.co/ehartford/Wizard-Vicuna-7B-Uncensored) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ehartford__Wizard-Vicuna-7B-Uncensored\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-18T07:04:55.060331](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__Wizard-Vicuna-7B-Uncensored/blob/main/results_2023-10-18T07-04-55.060331.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.18036912751677853,\n \"em_stderr\": 0.003937584689736024,\n \"f1\": 0.23801803691275183,\n \"f1_stderr\": 0.003988701736112215,\n \"acc\": 0.3838336904677134,\n \"acc_stderr\": 0.009164287920296908\n },\n \"harness|drop|3\": {\n \"em\": 0.18036912751677853,\n \"em_stderr\": 0.003937584689736024,\n \"f1\": 0.23801803691275183,\n \"f1_stderr\": 0.003988701736112215\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.045489006823351025,\n \"acc_stderr\": 0.005739657656722215\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7221783741120757,\n \"acc_stderr\": 0.012588918183871601\n }\n}\n```", "repo_url": "https://huggingface.co/ehartford/Wizard-Vicuna-7B-Uncensored", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|arc:challenge|25_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_18T07_04_55.060331", "path": ["**/details_harness|drop|3_2023-10-18T07-04-55.060331.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-18T07-04-55.060331.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_18T07_04_55.060331", "path": ["**/details_harness|gsm8k|5_2023-10-18T07-04-55.060331.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-18T07-04-55.060331.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hellaswag|10_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hellaswag|10_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T17:04:57.410493.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T17:04:57.410493.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T17:04:57.410493.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T17:04:57.410493.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": 
"2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T17:04:57.410493.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T17:04:57.410493.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_18T07_04_55.060331", "path": ["**/details_harness|winogrande|5_2023-10-18T07-04-55.060331.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-18T07-04-55.060331.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T17_04_57.410493", "path": ["results_2023-07-19T17:04:57.410493.parquet"]}, {"split": "2023_10_18T07_04_55.060331", "path": ["results_2023-10-18T07-04-55.060331.parquet"]}, {"split": "latest", "path": ["results_2023-10-18T07-04-55.060331.parquet"]}]}]}
2023-10-18T06:05:07+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of ehartford/Wizard-Vicuna-7B-Uncensored ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model ehartford/Wizard-Vicuna-7B-Uncensored on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-18T07:04:55.060331 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of ehartford/Wizard-Vicuna-7B-Uncensored", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/Wizard-Vicuna-7B-Uncensored on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-18T07:04:55.060331(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of ehartford/Wizard-Vicuna-7B-Uncensored", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/Wizard-Vicuna-7B-Uncensored on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-18T07:04:55.060331(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 26, 31, 174, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of ehartford/Wizard-Vicuna-7B-Uncensored## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/Wizard-Vicuna-7B-Uncensored on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-18T07:04:55.060331(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
509b42efeda1231c10aa24ca91f64951691413ba
# Dataset Card for Evaluation run of ehartford/based-30b

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/ehartford/based-30b
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [ehartford/based-30b](https://huggingface.co/ehartford/based-30b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 runs. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_ehartford__based-30b",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-15T18:28:19.982854](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__based-30b/blob/main/results_2023-10-15T18-28-19.982854.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.4554320469798658,
        "em_stderr": 0.005100085829229459,
        "f1": 0.5021749161073836,
        "f1_stderr": 0.004912415353294426,
        "acc": 0.4020687864152903,
        "acc_stderr": 0.006366601766869923
    },
    "harness|drop|3": {
        "em": 0.4554320469798658,
        "em_stderr": 0.005100085829229459,
        "f1": 0.5021749161073836,
        "f1_stderr": 0.004912415353294426
    },
    "harness|gsm8k|5": {
        "acc": 0.003032600454890068,
        "acc_stderr": 0.0015145735612245449
    },
    "harness|winogrande|5": {
        "acc": 0.8011049723756906,
        "acc_stderr": 0.011218629972515302
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
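The "results" configuration and "latest" split described in the card above can be read the same way as the per-task details. A minimal sketch, assuming the standard `datasets` API and using the config and split names listed in this record's metadata:

```python
from datasets import load_dataset

# "results" stores the aggregated metrics for each run; the "latest"
# split always mirrors the most recent timestamped run of this model.
results = load_dataset(
    "open-llm-leaderboard/details_ehartford__based-30b",
    "results",
    split="latest",
)

# One row per aggregated results payload; inspect the first row.
print(results[0])
```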
open-llm-leaderboard/details_ehartford__based-30b
[ "region:us" ]
2023-08-18T10:03:52+00:00
{"pretty_name": "Evaluation run of ehartford/based-30b", "dataset_summary": "Dataset automatically created during the evaluation run of model [ehartford/based-30b](https://huggingface.co/ehartford/based-30b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ehartford__based-30b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-15T18:28:19.982854](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__based-30b/blob/main/results_2023-10-15T18-28-19.982854.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.4554320469798658,\n \"em_stderr\": 0.005100085829229459,\n \"f1\": 0.5021749161073836,\n \"f1_stderr\": 0.004912415353294426,\n \"acc\": 0.4020687864152903,\n \"acc_stderr\": 0.006366601766869923\n },\n \"harness|drop|3\": {\n \"em\": 0.4554320469798658,\n \"em_stderr\": 0.005100085829229459,\n \"f1\": 0.5021749161073836,\n \"f1_stderr\": 0.004912415353294426\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.003032600454890068,\n \"acc_stderr\": 0.0015145735612245449\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8011049723756906,\n \"acc_stderr\": 0.011218629972515302\n }\n}\n```", "repo_url": "https://huggingface.co/ehartford/based-30b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|arc:challenge|25_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_15T18_28_19.982854", "path": ["**/details_harness|drop|3_2023-10-15T18-28-19.982854.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-15T18-28-19.982854.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_15T18_28_19.982854", "path": ["**/details_harness|gsm8k|5_2023-10-15T18-28-19.982854.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-15T18-28-19.982854.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hellaswag|10_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T22:36:40.245658.parquet", 
"**/details_harness|hendrycksTest-management|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T22:36:40.245658.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T22:36:40.245658.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T22:36:40.245658.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": 
"2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T22:36:40.245658.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T22:36:40.245658.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_15T18_28_19.982854", "path": ["**/details_harness|winogrande|5_2023-10-15T18-28-19.982854.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-15T18-28-19.982854.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T22_36_40.245658", "path": ["results_2023-07-19T22:36:40.245658.parquet"]}, {"split": "2023_10_15T18_28_19.982854", "path": ["results_2023-10-15T18-28-19.982854.parquet"]}, {"split": "latest", "path": ["results_2023-10-15T18-28-19.982854.parquet"]}]}]}
2023-10-15T17:28:32+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of ehartford/based-30b

## Dataset Description

- Homepage: 
- Repository: URL
- Paper: 
- Leaderboard: URL
- Point of Contact: clementine@URL

### Dataset Summary

Dataset automatically created during the evaluation run of model ehartford/based-30b on the Open LLM Leaderboard.

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 runs. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following (see the sketch just after this card):

## Latest results

These are the latest results from run 2023-10-15T18:28:19.982854 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):

### Supported Tasks and Leaderboards

### Languages

## Dataset Structure

### Data Instances

### Data Fields

### Data Splits

## Dataset Creation

### Curation Rationale

### Source Data

#### Initial Data Collection and Normalization

#### Who are the source language producers?

### Annotations

#### Annotation process

#### Who are the annotators?

### Personal and Sensitive Information

## Considerations for Using the Data

### Social Impact of Dataset

### Discussion of Biases

### Other Known Limitations

## Additional Information

### Dataset Curators

### Licensing Information

### Contributions
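The loading sentence in the plain-text card above refers to a code example that this rendering omits; the call itself appears in the markdown version of the card earlier in this record, and a minimal sketch of it is:

```python
from datasets import load_dataset

# Per-example details for one task configuration; the "train" split
# points at the latest results for this model.
data = load_dataset(
    "open-llm-leaderboard/details_ehartford__based-30b",
    "harness_winogrande_5",
    split="train",
)
```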
[ "# Dataset Card for Evaluation run of ehartford/based-30b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/based-30b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T18:28:19.982854(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of ehartford/based-30b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/based-30b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T18:28:19.982854(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 16, 31, 164, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of ehartford/based-30b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/based-30b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-15T18:28:19.982854(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
1505664920690651c3fadb76bb9b7fa36fa8c54d
# Dataset Card for Evaluation run of ehartford/dolphin-llama2-7b

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/ehartford/dolphin-llama2-7b
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [ehartford/dolphin-llama2-7b](https://huggingface.co/ehartford/dolphin-llama2-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 runs. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_ehartford__dolphin-llama2-7b",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-22T06:19:33.334508](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__dolphin-llama2-7b/blob/main/results_2023-10-22T06-19-33.334508.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.03387164429530201,
        "em_stderr": 0.0018525724686931102,
        "f1": 0.11525377516778465,
        "f1_stderr": 0.0024425047696980885,
        "acc": 0.34729408624959834,
        "acc_stderr": 0.009943822859176079
    },
    "harness|drop|3": {
        "em": 0.03387164429530201,
        "em_stderr": 0.0018525724686931102,
        "f1": 0.11525377516778465,
        "f1_stderr": 0.0024425047696980885
    },
    "harness|gsm8k|5": {
        "acc": 0.05686125852918878,
        "acc_stderr": 0.006378790242099651
    },
    "harness|winogrande|5": {
        "acc": 0.6377269139700079,
        "acc_stderr": 0.013508855476252508
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
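Besides the winogrande example above, each evaluated task has its own configuration with timestamped splits plus a "latest" alias. A minimal sketch, assuming the standard `datasets` API and the `harness_gsm8k_5` configuration named in this record's metadata:

```python
from datasets import load_dataset

# Per-example GSM8K details; "latest" aliases the most recent
# timestamped run of ehartford/dolphin-llama2-7b.
gsm8k_details = load_dataset(
    "open-llm-leaderboard/details_ehartford__dolphin-llama2-7b",
    "harness_gsm8k_5",
    split="latest",
)
print(len(gsm8k_details))  # number of evaluated examples
```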
open-llm-leaderboard/details_ehartford__dolphin-llama2-7b
[ "region:us" ]
2023-08-18T10:04:01+00:00
{"pretty_name": "Evaluation run of ehartford/dolphin-llama2-7b", "dataset_summary": "Dataset automatically created during the evaluation run of model [ehartford/dolphin-llama2-7b](https://huggingface.co/ehartford/dolphin-llama2-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ehartford__dolphin-llama2-7b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-22T06:19:33.334508](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__dolphin-llama2-7b/blob/main/results_2023-10-22T06-19-33.334508.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.03387164429530201,\n \"em_stderr\": 0.0018525724686931102,\n \"f1\": 0.11525377516778465,\n \"f1_stderr\": 0.0024425047696980885,\n \"acc\": 0.34729408624959834,\n \"acc_stderr\": 0.009943822859176079\n },\n \"harness|drop|3\": {\n \"em\": 0.03387164429530201,\n \"em_stderr\": 0.0018525724686931102,\n \"f1\": 0.11525377516778465,\n \"f1_stderr\": 0.0024425047696980885\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.05686125852918878,\n \"acc_stderr\": 0.006378790242099651\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6377269139700079,\n \"acc_stderr\": 0.013508855476252508\n }\n}\n```", "repo_url": "https://huggingface.co/ehartford/dolphin-llama2-7b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|arc:challenge|25_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_22T06_19_33.334508", "path": ["**/details_harness|drop|3_2023-10-22T06-19-33.334508.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-22T06-19-33.334508.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_22T06_19_33.334508", "path": ["**/details_harness|gsm8k|5_2023-10-22T06-19-33.334508.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-22T06-19-33.334508.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hellaswag|10_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", 
"data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T13:40:29.466575.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T13:40:29.466575.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-09T13:40:29.466575.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-09T13:40:29.466575.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-09T13:40:29.466575.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_22T06_19_33.334508", "path": ["**/details_harness|winogrande|5_2023-10-22T06-19-33.334508.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-22T06-19-33.334508.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_09T13_40_29.466575", "path": ["results_2023-08-09T13:40:29.466575.parquet"]}, {"split": "2023_10_22T06_19_33.334508", "path": ["results_2023-10-22T06-19-33.334508.parquet"]}, {"split": "latest", "path": ["results_2023-10-22T06-19-33.334508.parquet"]}]}]}
2023-10-22T05:19:45+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of ehartford/dolphin-llama2-7b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model ehartford/dolphin-llama2-7b on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-22T06:19:33.334508 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
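The loading snippet referenced just above was dropped when this text field was flattened; below is a minimal sketch of the call it refers to, reconstructed from the configurations declared in this repo's metadata (the `harness_winogrande_5` config and its timestamped/`latest` splits), not an authoritative restoration.

```python
from datasets import load_dataset

# Per-task details for one evaluated benchmark of dolphin-llama2-7b.
# Each run is stored as a timestamp-named split; per the card text,
# the "train" split always points at the latest run ("latest" is also
# declared in the repo metadata).
data = load_dataset(
    "open-llm-leaderboard/details_ehartford__dolphin-llama2-7b",
    "harness_winogrande_5",
    split="train",
)
```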
[ "# Dataset Card for Evaluation run of ehartford/dolphin-llama2-7b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/dolphin-llama2-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-22T06:19:33.334508(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of ehartford/dolphin-llama2-7b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/dolphin-llama2-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-22T06:19:33.334508(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 22, 31, 170, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of ehartford/dolphin-llama2-7b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/dolphin-llama2-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-22T06:19:33.334508(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
e4c32d706e823b722965da0ec5f178998f956bbb
# Dataset Card for Evaluation run of ehartford/dolphin-llama-13b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/ehartford/dolphin-llama-13b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [ehartford/dolphin-llama-13b](https://huggingface.co/ehartford/dolphin-llama-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_ehartford__dolphin-llama-13b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-22T08:31:06.423580](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__dolphin-llama-13b/blob/main/results_2023-10-22T08-31-06.423580.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.09867869127516779, "em_stderr": 0.003054155613095951, "f1": 0.1882760067114087, "f1_stderr": 0.0033481950499125467, "acc": 0.42166909111145284, "acc_stderr": 0.011280060733885005 }, "harness|drop|3": { "em": 0.09867869127516779, "em_stderr": 0.003054155613095951, "f1": 0.1882760067114087, "f1_stderr": 0.0033481950499125467 }, "harness|gsm8k|5": { "acc": 0.14404852160727824, "acc_stderr": 0.009672110973065284 }, "harness|winogrande|5": { "acc": 0.6992896606156275, "acc_stderr": 0.012888010494704725 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
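The card above only shows how to pull one task's per-sample details; the aggregated scores it mentions live in the separate "results" configuration. A hedged sketch of loading them, assuming the `results` config and `latest` split declared in this repo's metadata behave like any other `datasets` configuration:

```python
from datasets import load_dataset

# Aggregated metrics for every evaluation run of dolphin-llama-13b.
# The "latest" split mirrors the most recent run (2023-10-22T08:31:06
# here); older runs remain available under timestamp-named splits.
results = load_dataset(
    "open-llm-leaderboard/details_ehartford__dolphin-llama-13b",
    "results",
    split="latest",
)
print(results[0])  # one row of aggregated scores
```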
open-llm-leaderboard/details_ehartford__dolphin-llama-13b
[ "region:us" ]
2023-08-18T10:04:09+00:00
{"pretty_name": "Evaluation run of ehartford/dolphin-llama-13b", "dataset_summary": "Dataset automatically created during the evaluation run of model [ehartford/dolphin-llama-13b](https://huggingface.co/ehartford/dolphin-llama-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ehartford__dolphin-llama-13b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-22T08:31:06.423580](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__dolphin-llama-13b/blob/main/results_2023-10-22T08-31-06.423580.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.09867869127516779,\n \"em_stderr\": 0.003054155613095951,\n \"f1\": 0.1882760067114087,\n \"f1_stderr\": 0.0033481950499125467,\n \"acc\": 0.42166909111145284,\n \"acc_stderr\": 0.011280060733885005\n },\n \"harness|drop|3\": {\n \"em\": 0.09867869127516779,\n \"em_stderr\": 0.003054155613095951,\n \"f1\": 0.1882760067114087,\n \"f1_stderr\": 0.0033481950499125467\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.14404852160727824,\n \"acc_stderr\": 0.009672110973065284\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6992896606156275,\n \"acc_stderr\": 0.012888010494704725\n }\n}\n```", "repo_url": "https://huggingface.co/ehartford/dolphin-llama-13b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|arc:challenge|25_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|arc:challenge|25_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_22T01_25_54.857200", "path": ["**/details_harness|drop|3_2023-10-22T01-25-54.857200.parquet"]}, {"split": "2023_10_22T08_31_06.423580", "path": ["**/details_harness|drop|3_2023-10-22T08-31-06.423580.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-22T08-31-06.423580.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_22T01_25_54.857200", "path": ["**/details_harness|gsm8k|5_2023-10-22T01-25-54.857200.parquet"]}, {"split": "2023_10_22T08_31_06.423580", "path": ["**/details_harness|gsm8k|5_2023-10-22T08-31-06.423580.parquet"]}, {"split": "latest", "path": 
["**/details_harness|gsm8k|5_2023-10-22T08-31-06.423580.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hellaswag|10_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hellaswag|10_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T12:30:40.142317.parquet", 
"**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-24T12:30:40.142317.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T16:19:11.269492.parquet", 
"**/details_harness|hendrycksTest-econometrics|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T16:19:11.269492.parquet", 
"**/details_harness|hendrycksTest-public_relations|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T16:19:11.269492.parquet", 
"**/details_harness|hendrycksTest-human_aging|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-24T16:19:11.269492.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-astronomy|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", 
"data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": 
"2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": 
["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": 
["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": 
["**/details_harness|hendrycksTest-management|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": 
"2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": 
"2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-24T16:19:11.269492.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-24T16:19:11.269492.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_22T01_25_54.857200", "path": ["**/details_harness|winogrande|5_2023-10-22T01-25-54.857200.parquet"]}, {"split": "2023_10_22T08_31_06.423580", "path": ["**/details_harness|winogrande|5_2023-10-22T08-31-06.423580.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-22T08-31-06.423580.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_24T12_30_40.142317", "path": ["results_2023-07-24T12:30:40.142317.parquet"]}, {"split": "2023_07_24T16_19_11.269492", "path": ["results_2023-07-24T16:19:11.269492.parquet"]}, {"split": "2023_10_22T01_25_54.857200", "path": ["results_2023-10-22T01-25-54.857200.parquet"]}, {"split": "2023_10_22T08_31_06.423580", "path": ["results_2023-10-22T08-31-06.423580.parquet"]}, {"split": "latest", "path": ["results_2023-10-22T08-31-06.423580.parquet"]}]}]}
2023-10-22T07:31:14+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of ehartford/dolphin-llama-13b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model ehartford/dolphin-llama-13b on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-22T08:31:06.423580 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of ehartford/dolphin-llama-13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/dolphin-llama-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-22T08:31:06.423580(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of ehartford/dolphin-llama-13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/dolphin-llama-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-22T08:31:06.423580(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 21, 31, 169, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of ehartford/dolphin-llama-13b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/dolphin-llama-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-22T08:31:06.423580(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
6447b081fdf93fc2f0512991dabff86cc51012a5
# Dataset Card for Evaluation run of ehartford/Wizard-Vicuna-30B-Uncensored

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/ehartford/Wizard-Vicuna-30B-Uncensored
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [ehartford/Wizard-Vicuna-30B-Uncensored](https://huggingface.co/ehartford/Wizard-Vicuna-30B-Uncensored) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_ehartford__Wizard-Vicuna-30B-Uncensored",
    "harness_winogrande_5",
    split="train")
```

## Latest results

These are the [latest results from run 2023-10-18T12:57:01.368480](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__Wizard-Vicuna-30B-Uncensored/blob/main/results_2023-10-18T12-57-01.368480.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.18162751677852348,
        "em_stderr": 0.0039482621737543045,
        "f1": 0.2674087667785243,
        "f1_stderr": 0.004012090110572664,
        "acc": 0.46353130406008236,
        "acc_stderr": 0.01059244186586655
    },
    "harness|drop|3": {
        "em": 0.18162751677852348,
        "em_stderr": 0.0039482621737543045,
        "f1": 0.2674087667785243,
        "f1_stderr": 0.004012090110572664
    },
    "harness|gsm8k|5": {
        "acc": 0.1425322213798332,
        "acc_stderr": 0.009629588445673819
    },
    "harness|winogrande|5": {
        "acc": 0.7845303867403315,
        "acc_stderr": 0.011555295286059279
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
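As a follow-up to the loader shown in the card above, the aggregated numbers under "Latest results" live in the "results" configuration. A small sketch of reading the most recent snapshot is below, assuming the same "latest" split naming that the per-task configs use; the row schema is not documented in the card, so inspect it rather than assuming field names.

```python
# Sketch: read the aggregated metrics for this model from the "results"
# config described in the card; the "latest" split name is assumed to follow
# the same convention as the per-task configs.
from datasets import load_dataset

results = load_dataset(
    "open-llm-leaderboard/details_ehartford__Wizard-Vicuna-30B-Uncensored",
    "results",
    split="latest",
)
print(results.column_names)  # inspect the available columns first
print(results[0])            # first row of the aggregated results
```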
open-llm-leaderboard/details_ehartford__Wizard-Vicuna-30B-Uncensored
[ "region:us" ]
2023-08-18T10:04:20+00:00
{"pretty_name": "Evaluation run of ehartford/Wizard-Vicuna-30B-Uncensored", "dataset_summary": "Dataset automatically created during the evaluation run of model [ehartford/Wizard-Vicuna-30B-Uncensored](https://huggingface.co/ehartford/Wizard-Vicuna-30B-Uncensored) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ehartford__Wizard-Vicuna-30B-Uncensored\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-18T12:57:01.368480](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__Wizard-Vicuna-30B-Uncensored/blob/main/results_2023-10-18T12-57-01.368480.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.18162751677852348,\n \"em_stderr\": 0.0039482621737543045,\n \"f1\": 0.2674087667785243,\n \"f1_stderr\": 0.004012090110572664,\n \"acc\": 0.46353130406008236,\n \"acc_stderr\": 0.01059244186586655\n },\n \"harness|drop|3\": {\n \"em\": 0.18162751677852348,\n \"em_stderr\": 0.0039482621737543045,\n \"f1\": 0.2674087667785243,\n \"f1_stderr\": 0.004012090110572664\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.1425322213798332,\n \"acc_stderr\": 0.009629588445673819\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7845303867403315,\n \"acc_stderr\": 0.011555295286059279\n }\n}\n```", "repo_url": "https://huggingface.co/ehartford/Wizard-Vicuna-30B-Uncensored", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|arc:challenge|25_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_18T12_57_01.368480", "path": ["**/details_harness|drop|3_2023-10-18T12-57-01.368480.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-18T12-57-01.368480.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_18T12_57_01.368480", "path": ["**/details_harness|gsm8k|5_2023-10-18T12-57-01.368480.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-18T12-57-01.368480.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hellaswag|10_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hellaswag|10_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T22:31:27.283689.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T22:31:27.283689.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T22:31:27.283689.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T22:31:27.283689.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": 
"2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T22:31:27.283689.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T22:31:27.283689.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_18T12_57_01.368480", "path": ["**/details_harness|winogrande|5_2023-10-18T12-57-01.368480.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-18T12-57-01.368480.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T22_31_27.283689", "path": ["results_2023-07-19T22:31:27.283689.parquet"]}, {"split": "2023_10_18T12_57_01.368480", "path": ["results_2023-10-18T12-57-01.368480.parquet"]}, {"split": "latest", "path": ["results_2023-10-18T12-57-01.368480.parquet"]}]}]}
2023-10-18T11:57:14+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of ehartford/Wizard-Vicuna-30B-Uncensored

## Dataset Description

- Homepage:
- Repository: URL
- Paper:
- Leaderboard: URL
- Point of Contact: clementine@URL

### Dataset Summary

Dataset automatically created during the evaluation run of model ehartford/Wizard-Vicuna-30B-Uncensored on the Open LLM Leaderboard.

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following (see the sketch after this card):

## Latest results

These are the latest results from run 2023-10-18T12:57:01.368480 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

### Supported Tasks and Leaderboards

### Languages

## Dataset Structure

### Data Instances

### Data Fields

### Data Splits

## Dataset Creation

### Curation Rationale

### Source Data

#### Initial Data Collection and Normalization

#### Who are the source language producers?

### Annotations

#### Annotation process

#### Who are the annotators?

### Personal and Sensitive Information

## Considerations for Using the Data

### Social Impact of Dataset

### Discussion of Biases

### Other Known Limitations

## Additional Information

### Dataset Curators

### Licensing Information

### Contributions
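The load snippet referenced above was stripped when this summary was flattened, so a minimal sketch is given here instead. The repository id is an assumption inferred from the `open-llm-leaderboard/details_<org>__<model>` naming used by the sibling card later in this document, and the configuration name is taken from this record's metadata; neither is stated verbatim in the summary itself.

```python
from datasets import load_dataset

# Hypothetical sketch: the repository id below is inferred from the
# details_<org>__<model> naming convention, not quoted from the summary above.
# "harness_winogrande_5" is one of the configs listed in this record's metadata.
data = load_dataset(
    "open-llm-leaderboard/details_ehartford__Wizard-Vicuna-30B-Uncensored",
    "harness_winogrande_5",
    split="train",
)
```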
[ "# Dataset Card for Evaluation run of ehartford/Wizard-Vicuna-30B-Uncensored", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/Wizard-Vicuna-30B-Uncensored on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-18T12:57:01.368480(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of ehartford/Wizard-Vicuna-30B-Uncensored", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/Wizard-Vicuna-30B-Uncensored on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-18T12:57:01.368480(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 26, 31, 174, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of ehartford/Wizard-Vicuna-30B-Uncensored## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/Wizard-Vicuna-30B-Uncensored on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-18T12:57:01.368480(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
bb1447e63219d8e670b4861250a70fbaf5923567
# Dataset Card for Evaluation run of ehartford/WizardLM-33B-V1.0-Uncensored

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/ehartford/WizardLM-33B-V1.0-Uncensored
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [ehartford/WizardLM-33B-V1.0-Uncensored](https://huggingface.co/ehartford/WizardLM-33B-V1.0-Uncensored) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_ehartford__WizardLM-33B-V1.0-Uncensored",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-12T23:21:17.619828](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__WizardLM-33B-V1.0-Uncensored/blob/main/results_2023-10-12T23-21-17.619828.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.13328439597315436,
        "em_stderr": 0.0034807081740792067,
        "f1": 0.20888108221476515,
        "f1_stderr": 0.003634426964391504,
        "acc": 0.48157132744485465,
        "acc_stderr": 0.01121741880244755
    },
    "harness|drop|3": {
        "em": 0.13328439597315436,
        "em_stderr": 0.0034807081740792067,
        "f1": 0.20888108221476515,
        "f1_stderr": 0.003634426964391504
    },
    "harness|gsm8k|5": {
        "acc": 0.1865049279757392,
        "acc_stderr": 0.010729140039689902
    },
    "harness|winogrande|5": {
        "acc": 0.77663772691397,
        "acc_stderr": 0.011705697565205198
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
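In addition to the per-task example in the card, the aggregated metrics can be pulled from the "results" configuration. The snippet below is a minimal sketch that is not part of the original card; the configuration name "results" and the split name "latest" are taken from this repository's metadata (reproduced further down), and the final `print` call is only an illustrative inspection step.

```python
from datasets import load_dataset

# Load the aggregated "results" configuration at its most recent run.
# "results" and "latest" are the config/split names listed in the repo metadata;
# treat them as assumptions if the repository layout changes.
results = load_dataset(
    "open-llm-leaderboard/details_ehartford__WizardLM-33B-V1.0-Uncensored",
    "results",
    split="latest",
)
print(results[0])  # one row of aggregated metrics for the latest run
```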
open-llm-leaderboard/details_ehartford__WizardLM-33B-V1.0-Uncensored
[ "region:us" ]
2023-08-18T10:04:29+00:00
{"pretty_name": "Evaluation run of ehartford/WizardLM-33B-V1.0-Uncensored", "dataset_summary": "Dataset automatically created during the evaluation run of model [ehartford/WizardLM-33B-V1.0-Uncensored](https://huggingface.co/ehartford/WizardLM-33B-V1.0-Uncensored) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ehartford__WizardLM-33B-V1.0-Uncensored\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-12T23:21:17.619828](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__WizardLM-33B-V1.0-Uncensored/blob/main/results_2023-10-12T23-21-17.619828.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.13328439597315436,\n \"em_stderr\": 0.0034807081740792067,\n \"f1\": 0.20888108221476515,\n \"f1_stderr\": 0.003634426964391504,\n \"acc\": 0.48157132744485465,\n \"acc_stderr\": 0.01121741880244755\n },\n \"harness|drop|3\": {\n \"em\": 0.13328439597315436,\n \"em_stderr\": 0.0034807081740792067,\n \"f1\": 0.20888108221476515,\n \"f1_stderr\": 0.003634426964391504\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.1865049279757392,\n \"acc_stderr\": 0.010729140039689902\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.77663772691397,\n \"acc_stderr\": 0.011705697565205198\n }\n}\n```", "repo_url": "https://huggingface.co/ehartford/WizardLM-33B-V1.0-Uncensored", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|arc:challenge|25_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_12T23_21_17.619828", "path": ["**/details_harness|drop|3_2023-10-12T23-21-17.619828.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-12T23-21-17.619828.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_12T23_21_17.619828", "path": ["**/details_harness|gsm8k|5_2023-10-12T23-21-17.619828.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-12T23-21-17.619828.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hellaswag|10_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hellaswag|10_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T10:34:34.277823.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-09T10:34:34.277823.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T10:34:34.277823.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-09T10:34:34.277823.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": 
"2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-09T10:34:34.277823.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-09T10:34:34.277823.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_12T23_21_17.619828", "path": ["**/details_harness|winogrande|5_2023-10-12T23-21-17.619828.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-12T23-21-17.619828.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_09T10_34_34.277823", "path": ["results_2023-08-09T10:34:34.277823.parquet"]}, {"split": "2023_10_12T23_21_17.619828", "path": ["results_2023-10-12T23-21-17.619828.parquet"]}, {"split": "latest", "path": ["results_2023-10-12T23-21-17.619828.parquet"]}]}]}
2023-10-12T22:21:33+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of ehartford/WizardLM-33B-V1.0-Uncensored ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model ehartford/WizardLM-33B-V1.0-Uncensored on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-12T23:21:17.619828 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of ehartford/WizardLM-33B-V1.0-Uncensored", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/WizardLM-33B-V1.0-Uncensored on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-12T23:21:17.619828(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of ehartford/WizardLM-33B-V1.0-Uncensored", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/WizardLM-33B-V1.0-Uncensored on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-12T23:21:17.619828(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 26, 31, 174, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of ehartford/WizardLM-33B-V1.0-Uncensored## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/WizardLM-33B-V1.0-Uncensored on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-12T23:21:17.619828(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
e39eb51232df20ab45cad9f21faba6f7b9c23653
# Dataset Card for Evaluation run of ehartford/WizardLM-30B-Uncensored ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/ehartford/WizardLM-30B-Uncensored - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [ehartford/WizardLM-30B-Uncensored](https://huggingface.co/ehartford/WizardLM-30B-Uncensored) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_ehartford__WizardLM-30B-Uncensored", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-13T01:45:13.473461](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__WizardLM-30B-Uncensored/blob/main/results_2023-10-13T01-45-13.473461.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.1761744966442953, "em_stderr": 0.0039014746298017622, "f1": 0.2744651845637597, "f1_stderr": 0.004005542048425456, "acc": 0.43618703748803983, "acc_stderr": 0.010751614385080125 }, "harness|drop|3": { "em": 0.1761744966442953, "em_stderr": 0.0039014746298017622, "f1": 0.2744651845637597, "f1_stderr": 0.004005542048425456 }, "harness|gsm8k|5": { "acc": 0.1288855193328279, "acc_stderr": 0.009229580761400267 }, "harness|winogrande|5": { "acc": 0.7434885556432518, "acc_stderr": 0.012273648008759984 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_ehartford__WizardLM-30B-Uncensored
[ "region:us" ]
2023-08-18T10:04:37+00:00
{"pretty_name": "Evaluation run of ehartford/WizardLM-30B-Uncensored", "dataset_summary": "Dataset automatically created during the evaluation run of model [ehartford/WizardLM-30B-Uncensored](https://huggingface.co/ehartford/WizardLM-30B-Uncensored) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ehartford__WizardLM-30B-Uncensored\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-13T01:45:13.473461](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__WizardLM-30B-Uncensored/blob/main/results_2023-10-13T01-45-13.473461.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.1761744966442953,\n \"em_stderr\": 0.0039014746298017622,\n \"f1\": 0.2744651845637597,\n \"f1_stderr\": 0.004005542048425456,\n \"acc\": 0.43618703748803983,\n \"acc_stderr\": 0.010751614385080125\n },\n \"harness|drop|3\": {\n \"em\": 0.1761744966442953,\n \"em_stderr\": 0.0039014746298017622,\n \"f1\": 0.2744651845637597,\n \"f1_stderr\": 0.004005542048425456\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.1288855193328279,\n \"acc_stderr\": 0.009229580761400267\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7434885556432518,\n \"acc_stderr\": 0.012273648008759984\n }\n}\n```", "repo_url": "https://huggingface.co/ehartford/WizardLM-30B-Uncensored", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|arc:challenge|25_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_13T01_45_13.473461", "path": ["**/details_harness|drop|3_2023-10-13T01-45-13.473461.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-13T01-45-13.473461.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_13T01_45_13.473461", "path": ["**/details_harness|gsm8k|5_2023-10-13T01-45-13.473461.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-13T01-45-13.473461.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hellaswag|10_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T22:33:53.366908.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T22:33:53.366908.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T22:33:53.366908.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T22:33:53.366908.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T22:33:53.366908.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_13T01_45_13.473461", "path": ["**/details_harness|winogrande|5_2023-10-13T01-45-13.473461.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-13T01-45-13.473461.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T22_33_53.366908", "path": ["results_2023-07-19T22:33:53.366908.parquet"]}, {"split": "2023_10_13T01_45_13.473461", "path": ["results_2023-10-13T01-45-13.473461.parquet"]}, {"split": "latest", "path": ["results_2023-10-13T01-45-13.473461.parquet"]}]}]}
2023-10-13T00:45:26+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of ehartford/WizardLM-30B-Uncensored ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model ehartford/WizardLM-30B-Uncensored on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-13T01:45:13.473461 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of ehartford/WizardLM-30B-Uncensored", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/WizardLM-30B-Uncensored on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-13T01:45:13.473461(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of ehartford/WizardLM-30B-Uncensored", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/WizardLM-30B-Uncensored on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-13T01:45:13.473461(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 23, 31, 171, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of ehartford/WizardLM-30B-Uncensored## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/WizardLM-30B-Uncensored on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-13T01:45:13.473461(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
b7392465f5403d611df4366fa15a69c2aa74f831
# Dataset Card for Evaluation run of digitous/GPT-R ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/digitous/GPT-R - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [digitous/GPT-R](https://huggingface.co/digitous/GPT-R) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_digitous__GPT-R", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-21T16:59:10.441941](https://huggingface.co/datasets/open-llm-leaderboard/details_digitous__GPT-R/blob/main/results_2023-10-21T16-59-10.441941.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0012583892617449664, "em_stderr": 0.00036305608931189593, "f1": 0.05138632550335586, "f1_stderr": 0.0012400453401352261, "acc": 0.32998109710963497, "acc_stderr": 0.00845227996433148 }, "harness|drop|3": { "em": 0.0012583892617449664, "em_stderr": 0.00036305608931189593, "f1": 0.05138632550335586, "f1_stderr": 0.0012400453401352261 }, "harness|gsm8k|5": { "acc": 0.01592115238817286, "acc_stderr": 0.0034478192723890067 }, "harness|winogrande|5": { "acc": 0.6440410418310971, "acc_stderr": 0.013456740656273952 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_digitous__GPT-R
[ "region:us" ]
2023-08-18T10:04:46+00:00
{"pretty_name": "Evaluation run of digitous/GPT-R", "dataset_summary": "Dataset automatically created during the evaluation run of model [digitous/GPT-R](https://huggingface.co/digitous/GPT-R) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_digitous__GPT-R\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-21T16:59:10.441941](https://huggingface.co/datasets/open-llm-leaderboard/details_digitous__GPT-R/blob/main/results_2023-10-21T16-59-10.441941.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0012583892617449664,\n \"em_stderr\": 0.00036305608931189593,\n \"f1\": 0.05138632550335586,\n \"f1_stderr\": 0.0012400453401352261,\n \"acc\": 0.32998109710963497,\n \"acc_stderr\": 0.00845227996433148\n },\n \"harness|drop|3\": {\n \"em\": 0.0012583892617449664,\n \"em_stderr\": 0.00036305608931189593,\n \"f1\": 0.05138632550335586,\n \"f1_stderr\": 0.0012400453401352261\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.01592115238817286,\n \"acc_stderr\": 0.0034478192723890067\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6440410418310971,\n \"acc_stderr\": 0.013456740656273952\n }\n}\n```", "repo_url": "https://huggingface.co/digitous/GPT-R", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|arc:challenge|25_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_21T16_59_10.441941", "path": ["**/details_harness|drop|3_2023-10-21T16-59-10.441941.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-21T16-59-10.441941.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_21T16_59_10.441941", "path": ["**/details_harness|gsm8k|5_2023-10-21T16-59-10.441941.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-21T16-59-10.441941.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hellaswag|10_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T20:10:48.990479.parquet", 
"**/details_harness|hendrycksTest-management|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T20:10:48.990479.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T20:10:48.990479.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T20:10:48.990479.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": 
"2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T20:10:48.990479.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T20:10:48.990479.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_21T16_59_10.441941", "path": ["**/details_harness|winogrande|5_2023-10-21T16-59-10.441941.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-21T16-59-10.441941.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T20_10_48.990479", "path": ["results_2023-07-19T20:10:48.990479.parquet"]}, {"split": "2023_10_21T16_59_10.441941", "path": ["results_2023-10-21T16-59-10.441941.parquet"]}, {"split": "latest", "path": ["results_2023-10-21T16-59-10.441941.parquet"]}]}]}
2023-10-21T15:59:22+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of digitous/GPT-R ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model digitous/GPT-R on the Open LLM Leaderboard. The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-21T16:59:10.441941(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of digitous/GPT-R", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/GPT-R on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-21T16:59:10.441941(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of digitous/GPT-R", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/GPT-R on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-21T16:59:10.441941(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 16, 31, 164, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of digitous/GPT-R## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/GPT-R on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-21T16:59:10.441941(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
f29b39439fdefc96d722de371a5c871f102b6f21
# Dataset Card for Evaluation run of digitous/Skegma-GPTJ

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/digitous/Skegma-GPTJ
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [digitous/Skegma-GPTJ](https://huggingface.co/digitous/Skegma-GPTJ) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_digitous__Skegma-GPTJ",
    "harness_winogrande_5",
    split="train")
```

## Latest results

These are the [latest results from run 2023-10-22T01:11:06.361461](https://huggingface.co/datasets/open-llm-leaderboard/details_digitous__Skegma-GPTJ/blob/main/results_2023-10-22T01-11-06.361461.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.0006291946308724832,
        "em_stderr": 0.0002568002749723976,
        "f1": 0.04913485738255054,
        "f1_stderr": 0.0012043047173197863,
        "acc": 0.33078592102672794,
        "acc_stderr": 0.008401282106163142
    },
    "harness|drop|3": {
        "em": 0.0006291946308724832,
        "em_stderr": 0.0002568002749723976,
        "f1": 0.04913485738255054,
        "f1_stderr": 0.0012043047173197863
    },
    "harness|gsm8k|5": {
        "acc": 0.015163002274450341,
        "acc_stderr": 0.0033660229497263316
    },
    "harness|winogrande|5": {
        "acc": 0.6464088397790055,
        "acc_stderr": 0.013436541262599952
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
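A minimal sketch of pulling the aggregated scores rather than the per-example details: it assumes the "results" configuration and its "latest" split listed in the configuration metadata below behave like the per-task configurations, and the column layout of the aggregated rows is an assumption rather than something documented in this card.

```python
from datasets import load_dataset

# The "results" configuration collects the aggregated metrics of each run;
# per the configuration list below, its "latest" split points at the most
# recent results parquet file.
agg = load_dataset("open-llm-leaderboard/details_digitous__Skegma-GPTJ",
    "results",
    split="latest")

# Inspect the schema before relying on specific columns; the exact layout of
# the aggregated rows is assumed here, not documented in the card above.
print(agg.column_names)
print(agg[0])
```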
open-llm-leaderboard/details_digitous__Skegma-GPTJ
[ "region:us" ]
2023-08-18T10:04:55+00:00
{"pretty_name": "Evaluation run of digitous/Skegma-GPTJ", "dataset_summary": "Dataset automatically created during the evaluation run of model [digitous/Skegma-GPTJ](https://huggingface.co/digitous/Skegma-GPTJ) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_digitous__Skegma-GPTJ\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-22T01:11:06.361461](https://huggingface.co/datasets/open-llm-leaderboard/details_digitous__Skegma-GPTJ/blob/main/results_2023-10-22T01-11-06.361461.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0006291946308724832,\n \"em_stderr\": 0.0002568002749723976,\n \"f1\": 0.04913485738255054,\n \"f1_stderr\": 0.0012043047173197863,\n \"acc\": 0.33078592102672794,\n \"acc_stderr\": 0.008401282106163142\n },\n \"harness|drop|3\": {\n \"em\": 0.0006291946308724832,\n \"em_stderr\": 0.0002568002749723976,\n \"f1\": 0.04913485738255054,\n \"f1_stderr\": 0.0012043047173197863\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.015163002274450341,\n \"acc_stderr\": 0.0033660229497263316\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6464088397790055,\n \"acc_stderr\": 0.013436541262599952\n }\n}\n```", "repo_url": "https://huggingface.co/digitous/Skegma-GPTJ", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|arc:challenge|25_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_22T01_11_06.361461", "path": ["**/details_harness|drop|3_2023-10-22T01-11-06.361461.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-22T01-11-06.361461.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_22T01_11_06.361461", "path": ["**/details_harness|gsm8k|5_2023-10-22T01-11-06.361461.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-22T01-11-06.361461.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hellaswag|10_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": 
"2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:58:51.471216.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:58:51.471216.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T19:58:51.471216.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T19:58:51.471216.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T19:58:51.471216.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_22T01_11_06.361461", "path": ["**/details_harness|winogrande|5_2023-10-22T01-11-06.361461.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-22T01-11-06.361461.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T19_58_51.471216", "path": ["results_2023-07-19T19:58:51.471216.parquet"]}, {"split": "2023_10_22T01_11_06.361461", "path": ["results_2023-10-22T01-11-06.361461.parquet"]}, {"split": "latest", "path": ["results_2023-10-22T01-11-06.361461.parquet"]}]}]}
2023-10-22T00:11:19+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of digitous/Skegma-GPTJ ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model digitous/Skegma-GPTJ on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-22T01:11:06.361461 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
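The flattened card above drops the code snippet that originally followed "To load the details from a run, you can for instance do the following:". A minimal sketch of that call, assuming the details repo follows the leaderboard's usual `open-llm-leaderboard/details_<org>__<model>` naming pattern and using the `harness_winogrande_5` config and `latest` split listed in the metadata above:

```python
from datasets import load_dataset

# The repo id is an assumption based on the leaderboard's naming pattern;
# the "harness_winogrande_5" config and "latest" split are taken from this
# record's metadata, not from the stripped code block itself.
data = load_dataset(
    "open-llm-leaderboard/details_digitous__Skegma-GPTJ",
    "harness_winogrande_5",
    split="latest",
)
print(data)  # per-example details for the most recent winogrande evaluation run
```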
[ "# Dataset Card for Evaluation run of digitous/Skegma-GPTJ", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/Skegma-GPTJ on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-22T01:11:06.361461(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of digitous/Skegma-GPTJ", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/Skegma-GPTJ on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-22T01:11:06.361461(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 20, 31, 168, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of digitous/Skegma-GPTJ## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/Skegma-GPTJ on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-22T01:11:06.361461(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
14336006e7daa168cb3352c577ed3161af4b7ed8
# Dataset Card for Evaluation run of digitous/Alpacino30b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/digitous/Alpacino30b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [digitous/Alpacino30b](https://huggingface.co/digitous/Alpacino30b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_digitous__Alpacino30b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-09-17T12:47:23.707315](https://huggingface.co/datasets/open-llm-leaderboard/details_digitous__Alpacino30b/blob/main/results_2023-09-17T12-47-23.707315.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.001363255033557047, "em_stderr": 0.00037786091964607055, "f1": 0.06650901845637598, "f1_stderr": 0.0014161819077838128, "acc": 0.47782156604971476, "acc_stderr": 0.010661957664554408 }, "harness|drop|3": { "em": 0.001363255033557047, "em_stderr": 0.00037786091964607055, "f1": 0.06650901845637598, "f1_stderr": 0.0014161819077838128 }, "harness|gsm8k|5": { "acc": 0.15769522365428354, "acc_stderr": 0.01003890157506138 }, "harness|winogrande|5": { "acc": 0.797947908445146, "acc_stderr": 0.011285013754047434 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
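Beyond per-task details, the card's "results" configuration can be loaded the same way to retrieve the aggregated metrics shown above. A short sketch, assuming the "results" config and "latest" split names listed in this record's metadata (the exact column layout of the parquet file is not guaranteed here):

```python
from datasets import load_dataset

# "results" config and "latest" split come from the configs list in the
# metadata below; treat the column layout as an assumption.
results = load_dataset(
    "open-llm-leaderboard/details_digitous__Alpacino30b",
    "results",
    split="latest",
)
print(results[0])  # aggregated metrics from the most recent evaluation run
```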
open-llm-leaderboard/details_digitous__Alpacino30b
[ "region:us" ]
2023-08-18T10:05:03+00:00
{"pretty_name": "Evaluation run of digitous/Alpacino30b", "dataset_summary": "Dataset automatically created during the evaluation run of model [digitous/Alpacino30b](https://huggingface.co/digitous/Alpacino30b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_digitous__Alpacino30b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-09-17T12:47:23.707315](https://huggingface.co/datasets/open-llm-leaderboard/details_digitous__Alpacino30b/blob/main/results_2023-09-17T12-47-23.707315.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.001363255033557047,\n \"em_stderr\": 0.00037786091964607055,\n \"f1\": 0.06650901845637598,\n \"f1_stderr\": 0.0014161819077838128,\n \"acc\": 0.47782156604971476,\n \"acc_stderr\": 0.010661957664554408\n },\n \"harness|drop|3\": {\n \"em\": 0.001363255033557047,\n \"em_stderr\": 0.00037786091964607055,\n \"f1\": 0.06650901845637598,\n \"f1_stderr\": 0.0014161819077838128\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.15769522365428354,\n \"acc_stderr\": 0.01003890157506138\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.797947908445146,\n \"acc_stderr\": 0.011285013754047434\n }\n}\n```", "repo_url": "https://huggingface.co/digitous/Alpacino30b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|arc:challenge|25_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_17T12_47_23.707315", "path": ["**/details_harness|drop|3_2023-09-17T12-47-23.707315.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-17T12-47-23.707315.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_17T12_47_23.707315", "path": ["**/details_harness|gsm8k|5_2023-09-17T12-47-23.707315.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-09-17T12-47-23.707315.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hellaswag|10_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": 
"2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T22:46:17.057330.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T22:46:17.057330.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T22:46:17.057330.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T22:46:17.057330.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T22:46:17.057330.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_17T12_47_23.707315", "path": ["**/details_harness|winogrande|5_2023-09-17T12-47-23.707315.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-17T12-47-23.707315.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T22_46_17.057330", "path": ["results_2023-07-19T22:46:17.057330.parquet"]}, {"split": "2023_09_17T12_47_23.707315", "path": ["results_2023-09-17T12-47-23.707315.parquet"]}, {"split": "latest", "path": ["results_2023-09-17T12-47-23.707315.parquet"]}]}]}
2023-09-17T11:47:36+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of digitous/Alpacino30b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model digitous/Alpacino30b on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-09-17T12:47:23.707315 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
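The loading snippet referenced in the Dataset Summary above was stripped from this plain-text rendering of the card. Below is a minimal sketch; the repository id `open-llm-leaderboard/details_digitous__Alpacino30b` and the `harness_winogrande_5` configuration are inferred from the naming pattern used by the other entries in this dump, so treat them as assumptions rather than verified values.

```python
from datasets import load_dataset

# Load one evaluation configuration; per the card, the "train" split
# always points to the latest results for that configuration.
data = load_dataset(
    "open-llm-leaderboard/details_digitous__Alpacino30b",  # assumed repository id
    "harness_winogrande_5",                                 # assumed config name
    split="train",
)
print(data)
```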
[ "# Dataset Card for Evaluation run of digitous/Alpacino30b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/Alpacino30b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-17T12:47:23.707315(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of digitous/Alpacino30b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/Alpacino30b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-17T12:47:23.707315(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 17, 31, 165, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of digitous/Alpacino30b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/Alpacino30b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-09-17T12:47:23.707315(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
9b2ce438c93e5561161d7524fcee1a020e05eb36
# Dataset Card for Evaluation run of digitous/Javalion-R ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/digitous/Javalion-R - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [digitous/Javalion-R](https://huggingface.co/digitous/Javalion-R) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_digitous__Javalion-R", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-12T21:07:25.804829](https://huggingface.co/datasets/open-llm-leaderboard/details_digitous__Javalion-R/blob/main/results_2023-10-12T21-07-25.804829.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0010486577181208054, "em_stderr": 0.0003314581465219256, "f1": 0.04845847315436258, "f1_stderr": 0.0011637240305010866, "acc": 0.34041837679282755, "acc_stderr": 0.008896821469599773 }, "harness|drop|3": { "em": 0.0010486577181208054, "em_stderr": 0.0003314581465219256, "f1": 0.04845847315436258, "f1_stderr": 0.0011637240305010866 }, "harness|gsm8k|5": { "acc": 0.026535253980288095, "acc_stderr": 0.004427045987265169 }, "harness|winogrande|5": { "acc": 0.654301499605367, "acc_stderr": 0.013366596951934376 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_digitous__Javalion-R
[ "region:us" ]
2023-08-18T10:05:12+00:00
{"pretty_name": "Evaluation run of digitous/Javalion-R", "dataset_summary": "Dataset automatically created during the evaluation run of model [digitous/Javalion-R](https://huggingface.co/digitous/Javalion-R) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_digitous__Javalion-R\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-12T21:07:25.804829](https://huggingface.co/datasets/open-llm-leaderboard/details_digitous__Javalion-R/blob/main/results_2023-10-12T21-07-25.804829.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0010486577181208054,\n \"em_stderr\": 0.0003314581465219256,\n \"f1\": 0.04845847315436258,\n \"f1_stderr\": 0.0011637240305010866,\n \"acc\": 0.34041837679282755,\n \"acc_stderr\": 0.008896821469599773\n },\n \"harness|drop|3\": {\n \"em\": 0.0010486577181208054,\n \"em_stderr\": 0.0003314581465219256,\n \"f1\": 0.04845847315436258,\n \"f1_stderr\": 0.0011637240305010866\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.026535253980288095,\n \"acc_stderr\": 0.004427045987265169\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.654301499605367,\n \"acc_stderr\": 0.013366596951934376\n }\n}\n```", "repo_url": "https://huggingface.co/digitous/Javalion-R", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|arc:challenge|25_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_12T21_07_25.804829", "path": ["**/details_harness|drop|3_2023-10-12T21-07-25.804829.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-12T21-07-25.804829.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_12T21_07_25.804829", "path": ["**/details_harness|gsm8k|5_2023-10-12T21-07-25.804829.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-12T21-07-25.804829.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hellaswag|10_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": 
"2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T14:00:54.512853.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T14:00:54.512853.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T14:00:54.512853.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T14:00:54.512853.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T14:00:54.512853.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_12T21_07_25.804829", "path": ["**/details_harness|winogrande|5_2023-10-12T21-07-25.804829.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-12T21-07-25.804829.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T14_00_54.512853", "path": ["results_2023-07-19T14:00:54.512853.parquet"]}, {"split": "2023_10_12T21_07_25.804829", "path": ["results_2023-10-12T21-07-25.804829.parquet"]}, {"split": "latest", "path": ["results_2023-10-12T21-07-25.804829.parquet"]}]}]}
2023-10-12T20:07:37+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of digitous/Javalion-R ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model digitous/Javalion-R on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-12T21:07:25.804829 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
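The loading snippet referenced in the Dataset Summary above is present in the markdown rendering of this card earlier in this record; it is reproduced here as a runnable sketch for readability, with the repository id and configuration name taken verbatim from that rendering.

```python
from datasets import load_dataset

# Load one evaluation configuration of the Javalion-R details dataset;
# per the card, the "train" split always points to the latest results.
data = load_dataset(
    "open-llm-leaderboard/details_digitous__Javalion-R",
    "harness_winogrande_5",
    split="train",
)
print(data)
```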
[ "# Dataset Card for Evaluation run of digitous/Javalion-R", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/Javalion-R on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-12T21:07:25.804829(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of digitous/Javalion-R", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/Javalion-R on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-12T21:07:25.804829(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 16, 31, 164, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of digitous/Javalion-R## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/Javalion-R on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-12T21:07:25.804829(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
6b82815aab3257e3be53b88cc4f992ee45405dfc
# Dataset Card for Evaluation run of digitous/Javelin-GPTJ ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/digitous/Javelin-GPTJ - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [digitous/Javelin-GPTJ](https://huggingface.co/digitous/Javelin-GPTJ) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_digitous__Javelin-GPTJ", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-16T01:31:09.179674](https://huggingface.co/datasets/open-llm-leaderboard/details_digitous__Javelin-GPTJ/blob/main/results_2023-10-16T01-31-09.179674.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0008389261744966443, "em_stderr": 0.0002964962989801232, "f1": 0.04767722315436259, "f1_stderr": 0.0011834240833723825, "acc": 0.3299344233062645, "acc_stderr": 0.008579096533320701 }, "harness|drop|3": { "em": 0.0008389261744966443, "em_stderr": 0.0002964962989801232, "f1": 0.04767722315436259, "f1_stderr": 0.0011834240833723825 }, "harness|gsm8k|5": { "acc": 0.01819560272934041, "acc_stderr": 0.0036816118940738727 }, "harness|winogrande|5": { "acc": 0.6416732438831886, "acc_stderr": 0.01347658117256753 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_digitous__Javelin-GPTJ
[ "region:us" ]
2023-08-18T10:05:20+00:00
{"pretty_name": "Evaluation run of digitous/Javelin-GPTJ", "dataset_summary": "Dataset automatically created during the evaluation run of model [digitous/Javelin-GPTJ](https://huggingface.co/digitous/Javelin-GPTJ) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_digitous__Javelin-GPTJ\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-16T01:31:09.179674](https://huggingface.co/datasets/open-llm-leaderboard/details_digitous__Javelin-GPTJ/blob/main/results_2023-10-16T01-31-09.179674.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0008389261744966443,\n \"em_stderr\": 0.0002964962989801232,\n \"f1\": 0.04767722315436259,\n \"f1_stderr\": 0.0011834240833723825,\n \"acc\": 0.3299344233062645,\n \"acc_stderr\": 0.008579096533320701\n },\n \"harness|drop|3\": {\n \"em\": 0.0008389261744966443,\n \"em_stderr\": 0.0002964962989801232,\n \"f1\": 0.04767722315436259,\n \"f1_stderr\": 0.0011834240833723825\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.01819560272934041,\n \"acc_stderr\": 0.0036816118940738727\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6416732438831886,\n \"acc_stderr\": 0.01347658117256753\n }\n}\n```", "repo_url": "https://huggingface.co/digitous/Javelin-GPTJ", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|arc:challenge|25_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_16T01_31_09.179674", "path": ["**/details_harness|drop|3_2023-10-16T01-31-09.179674.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-16T01-31-09.179674.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_16T01_31_09.179674", "path": ["**/details_harness|gsm8k|5_2023-10-16T01-31-09.179674.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-16T01-31-09.179674.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hellaswag|10_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": 
"2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T14:13:27.511337.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T14:13:27.511337.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T14:13:27.511337.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T14:13:27.511337.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T14:13:27.511337.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_16T01_31_09.179674", "path": ["**/details_harness|winogrande|5_2023-10-16T01-31-09.179674.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-16T01-31-09.179674.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T14_13_27.511337", "path": ["results_2023-07-19T14:13:27.511337.parquet"]}, {"split": "2023_10_16T01_31_09.179674", "path": ["results_2023-10-16T01-31-09.179674.parquet"]}, {"split": "latest", "path": ["results_2023-10-16T01-31-09.179674.parquet"]}]}]}
2023-10-16T00:31:21+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of digitous/Javelin-GPTJ

## Dataset Description

- Homepage: 
- Repository: URL
- Paper: 
- Leaderboard: URL
- Point of Contact: clementine@URL

### Dataset Summary

Dataset automatically created during the evaluation run of model digitous/Javelin-GPTJ on the Open LLM Leaderboard.

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following:

## Latest results

These are the latest results from run 2023-10-16T01:31:09.179674 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

### Supported Tasks and Leaderboards

### Languages

## Dataset Structure

### Data Instances

### Data Fields

### Data Splits

## Dataset Creation

### Curation Rationale

### Source Data

#### Initial Data Collection and Normalization

#### Who are the source language producers?

### Annotations

#### Annotation process

#### Who are the annotators?

### Personal and Sensitive Information

## Considerations for Using the Data

### Social Impact of Dataset

### Discussion of Biases

### Other Known Limitations

## Additional Information

### Dataset Curators

### Licensing Information

### Contributions
[ "# Dataset Card for Evaluation run of digitous/Javelin-GPTJ", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/Javelin-GPTJ on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-16T01:31:09.179674(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of digitous/Javelin-GPTJ", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/Javelin-GPTJ on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-16T01:31:09.179674(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 19, 31, 167, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of digitous/Javelin-GPTJ## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/Javelin-GPTJ on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-16T01:31:09.179674(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
76d823dfafd40606b19e30ff2efb3214b9df5d6b
# Dataset Card for Evaluation run of digitous/Janin-GPTJ

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/digitous/Janin-GPTJ
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [digitous/Janin-GPTJ](https://huggingface.co/digitous/Janin-GPTJ) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_digitous__Janin-GPTJ",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-13T01:41:17.922398](https://huggingface.co/datasets/open-llm-leaderboard/details_digitous__Janin-GPTJ/blob/main/results_2023-10-13T01-41-17.922398.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.0010486577181208054,
        "em_stderr": 0.00033145814652192754,
        "f1": 0.04561451342281894,
        "f1_stderr": 0.0011266864813108584,
        "acc": 0.3310872064113051,
        "acc_stderr": 0.008649495211328202
    },
    "harness|drop|3": {
        "em": 0.0010486577181208054,
        "em_stderr": 0.00033145814652192754,
        "f1": 0.04561451342281894,
        "f1_stderr": 0.0011266864813108584
    },
    "harness|gsm8k|5": {
        "acc": 0.019711902956785442,
        "acc_stderr": 0.0038289829787357143
    },
    "harness|winogrande|5": {
        "acc": 0.6424625098658248,
        "acc_stderr": 0.013470007443920688
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
open-llm-leaderboard/details_digitous__Janin-GPTJ
[ "region:us" ]
2023-08-18T10:05:28+00:00
{"pretty_name": "Evaluation run of digitous/Janin-GPTJ", "dataset_summary": "Dataset automatically created during the evaluation run of model [digitous/Janin-GPTJ](https://huggingface.co/digitous/Janin-GPTJ) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_digitous__Janin-GPTJ\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-13T01:41:17.922398](https://huggingface.co/datasets/open-llm-leaderboard/details_digitous__Janin-GPTJ/blob/main/results_2023-10-13T01-41-17.922398.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0010486577181208054,\n \"em_stderr\": 0.00033145814652192754,\n \"f1\": 0.04561451342281894,\n \"f1_stderr\": 0.0011266864813108584,\n \"acc\": 0.3310872064113051,\n \"acc_stderr\": 0.008649495211328202\n },\n \"harness|drop|3\": {\n \"em\": 0.0010486577181208054,\n \"em_stderr\": 0.00033145814652192754,\n \"f1\": 0.04561451342281894,\n \"f1_stderr\": 0.0011266864813108584\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.019711902956785442,\n \"acc_stderr\": 0.0038289829787357143\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6424625098658248,\n \"acc_stderr\": 0.013470007443920688\n }\n}\n```", "repo_url": "https://huggingface.co/digitous/Janin-GPTJ", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|arc:challenge|25_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_13T01_41_17.922398", "path": ["**/details_harness|drop|3_2023-10-13T01-41-17.922398.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-13T01-41-17.922398.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_13T01_41_17.922398", "path": ["**/details_harness|gsm8k|5_2023-10-13T01-41-17.922398.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-13T01-41-17.922398.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hellaswag|10_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": 
"2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T20:10:14.286796.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T20:10:14.286796.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T20:10:14.286796.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T20:10:14.286796.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T20:10:14.286796.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_13T01_41_17.922398", "path": ["**/details_harness|winogrande|5_2023-10-13T01-41-17.922398.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-13T01-41-17.922398.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T20_10_14.286796", "path": ["results_2023-07-19T20:10:14.286796.parquet"]}, {"split": "2023_10_13T01_41_17.922398", "path": ["results_2023-10-13T01-41-17.922398.parquet"]}, {"split": "latest", "path": ["results_2023-10-13T01-41-17.922398.parquet"]}]}]}
2023-10-13T00:41:29+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of digitous/Janin-GPTJ ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model digitous/Janin-GPTJ on the Open LLM Leaderboard. The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-13T01:41:17.922398(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of digitous/Janin-GPTJ", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/Janin-GPTJ on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-13T01:41:17.922398(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of digitous/Janin-GPTJ", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/Janin-GPTJ on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-13T01:41:17.922398(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 18, 31, 166, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of digitous/Janin-GPTJ## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/Janin-GPTJ on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-13T01:41:17.922398(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
05a76fc24c97379eda30f94e6e4a7f513606302e
# Dataset Card for Evaluation run of digitous/Javelin-R ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/digitous/Javelin-R - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [digitous/Javelin-R](https://huggingface.co/digitous/Javelin-R) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_digitous__Javelin-R", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-17T16:47:23.562896](https://huggingface.co/datasets/open-llm-leaderboard/details_digitous__Javelin-R/blob/main/results_2023-10-17T16-47-23.562896.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0010486577181208054, "em_stderr": 0.0003314581465219256, "f1": 0.05006606543624186, "f1_stderr": 0.001221286433761839, "acc": 0.3323333371230866, "acc_stderr": 0.008475235356826075 }, "harness|drop|3": { "em": 0.0010486577181208054, "em_stderr": 0.0003314581465219256, "f1": 0.05006606543624186, "f1_stderr": 0.001221286433761839 }, "harness|gsm8k|5": { "acc": 0.016679302501895376, "acc_stderr": 0.0035275958887224313 }, "harness|winogrande|5": { "acc": 0.6479873717442778, "acc_stderr": 0.01342287482492972 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
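A minimal supplementary sketch (not part of the original card): assuming the standard `datasets` API and the config/split names listed in the metadata below, the aggregated metrics of the most recent run can be read from the "results" config instead of a per-task detail config:

```python
from datasets import load_dataset

# Sketch: the "results" config aggregates the metrics of all runs for this model;
# its "latest" split points at the most recent results parquet file.
results = load_dataset(
    "open-llm-leaderboard/details_digitous__Javelin-R",
    "results",
    split="latest",
)

# Each row corresponds to one aggregated results file; print the first one.
print(results[0])
```

The per-task detail configs (e.g. "harness_winogrande_5", as in the loading example above) follow the same pattern, with one timestamped split per run plus a "latest" alias.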
open-llm-leaderboard/details_digitous__Javelin-R
[ "region:us" ]
2023-08-18T10:05:37+00:00
{"pretty_name": "Evaluation run of digitous/Javelin-R", "dataset_summary": "Dataset automatically created during the evaluation run of model [digitous/Javelin-R](https://huggingface.co/digitous/Javelin-R) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_digitous__Javelin-R\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-17T16:47:23.562896](https://huggingface.co/datasets/open-llm-leaderboard/details_digitous__Javelin-R/blob/main/results_2023-10-17T16-47-23.562896.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0010486577181208054,\n \"em_stderr\": 0.0003314581465219256,\n \"f1\": 0.05006606543624186,\n \"f1_stderr\": 0.001221286433761839,\n \"acc\": 0.3323333371230866,\n \"acc_stderr\": 0.008475235356826075\n },\n \"harness|drop|3\": {\n \"em\": 0.0010486577181208054,\n \"em_stderr\": 0.0003314581465219256,\n \"f1\": 0.05006606543624186,\n \"f1_stderr\": 0.001221286433761839\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.016679302501895376,\n \"acc_stderr\": 0.0035275958887224313\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6479873717442778,\n \"acc_stderr\": 0.01342287482492972\n }\n}\n```", "repo_url": "https://huggingface.co/digitous/Javelin-R", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|arc:challenge|25_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_17T16_47_23.562896", "path": ["**/details_harness|drop|3_2023-10-17T16-47-23.562896.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-17T16-47-23.562896.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_17T16_47_23.562896", "path": ["**/details_harness|gsm8k|5_2023-10-17T16-47-23.562896.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-17T16-47-23.562896.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hellaswag|10_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", 
"path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:50:05.826283.parquet", 
"**/details_harness|hendrycksTest-management|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:50:05.826283.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T19:50:05.826283.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:50:05.826283.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": 
"2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T19:50:05.826283.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T19:50:05.826283.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_17T16_47_23.562896", "path": ["**/details_harness|winogrande|5_2023-10-17T16-47-23.562896.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-17T16-47-23.562896.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T19_50_05.826283", "path": ["results_2023-07-19T19:50:05.826283.parquet"]}, {"split": "2023_10_17T16_47_23.562896", "path": ["results_2023-10-17T16-47-23.562896.parquet"]}, {"split": "latest", "path": ["results_2023-10-17T16-47-23.562896.parquet"]}]}]}
2023-10-17T15:47:36+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of digitous/Javelin-R ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model digitous/Javelin-R on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-17T16:47:23.562896 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each of them in the results and in the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
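A minimal sketch of the load call the summary above refers to (the flattened card text drops the original code fence). The repo id `open-llm-leaderboard/details_digitous__Javelin-R` is an assumption, taken by analogy with the sibling cards in this dump; the `harness_winogrande_5` config and its `latest` split do appear in this record's metadata.

```python
from datasets import load_dataset

# One evaluation configuration; the "latest" split points at the newest run for that task.
data = load_dataset(
    "open-llm-leaderboard/details_digitous__Javelin-R",  # assumed repo id (naming follows the sibling cards)
    "harness_winogrande_5",
    split="latest",
)
```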
[ "# Dataset Card for Evaluation run of digitous/Javelin-R", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/Javelin-R on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-17T16:47:23.562896(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of digitous/Javelin-R", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/Javelin-R on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-17T16:47:23.562896(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 17, 31, 165, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of digitous/Javelin-R## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/Javelin-R on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-17T16:47:23.562896(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
99eb1c9ab0409802cd0f6953e189b63751b3288d
# Dataset Card for Evaluation run of digitous/Janin-R ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/digitous/Janin-R - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [digitous/Janin-R](https://huggingface.co/digitous/Janin-R) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_digitous__Janin-R", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-09-17T03:14:06.115114](https://huggingface.co/datasets/open-llm-leaderboard/details_digitous__Janin-R/blob/main/results_2023-09-17T03-14-06.115114.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.001153523489932886, "em_stderr": 0.0003476179896857095, "f1": 0.04803796140939615, "f1_stderr": 0.0011624552972241407, "acc": 0.3381283685172032, "acc_stderr": 0.00874019702471766 }, "harness|drop|3": { "em": 0.001153523489932886, "em_stderr": 0.0003476179896857095, "f1": 0.04803796140939615, "f1_stderr": 0.0011624552972241407 }, "harness|gsm8k|5": { "acc": 0.022744503411675512, "acc_stderr": 0.004106620637749676 }, "harness|winogrande|5": { "acc": 0.6535122336227308, "acc_stderr": 0.013373773411685646 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_digitous__Janin-R
[ "region:us" ]
2023-08-18T10:05:46+00:00
{"pretty_name": "Evaluation run of digitous/Janin-R", "dataset_summary": "Dataset automatically created during the evaluation run of model [digitous/Janin-R](https://huggingface.co/digitous/Janin-R) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_digitous__Janin-R\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-09-17T03:14:06.115114](https://huggingface.co/datasets/open-llm-leaderboard/details_digitous__Janin-R/blob/main/results_2023-09-17T03-14-06.115114.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.001153523489932886,\n \"em_stderr\": 0.0003476179896857095,\n \"f1\": 0.04803796140939615,\n \"f1_stderr\": 0.0011624552972241407,\n \"acc\": 0.3381283685172032,\n \"acc_stderr\": 0.00874019702471766\n },\n \"harness|drop|3\": {\n \"em\": 0.001153523489932886,\n \"em_stderr\": 0.0003476179896857095,\n \"f1\": 0.04803796140939615,\n \"f1_stderr\": 0.0011624552972241407\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.022744503411675512,\n \"acc_stderr\": 0.004106620637749676\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6535122336227308,\n \"acc_stderr\": 0.013373773411685646\n }\n}\n```", "repo_url": "https://huggingface.co/digitous/Janin-R", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|arc:challenge|25_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_17T03_14_06.115114", "path": ["**/details_harness|drop|3_2023-09-17T03-14-06.115114.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-17T03-14-06.115114.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_17T03_14_06.115114", "path": ["**/details_harness|gsm8k|5_2023-09-17T03-14-06.115114.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-09-17T03-14-06.115114.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hellaswag|10_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:29:39.251365.parquet", 
"**/details_harness|hendrycksTest-management|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:29:39.251365.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T19:29:39.251365.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:29:39.251365.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": 
"2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T19:29:39.251365.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T19:29:39.251365.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_17T03_14_06.115114", "path": ["**/details_harness|winogrande|5_2023-09-17T03-14-06.115114.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-17T03-14-06.115114.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T19_29_39.251365", "path": ["results_2023-07-19T19:29:39.251365.parquet"]}, {"split": "2023_09_17T03_14_06.115114", "path": ["results_2023-09-17T03-14-06.115114.parquet"]}, {"split": "latest", "path": ["results_2023-09-17T03-14-06.115114.parquet"]}]}]}
2023-09-17T02:14:17+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of digitous/Janin-R ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model digitous/Janin-R on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-09-17T03:14:06.115114 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each of them in the results and in the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
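The load call this summary refers to, reproduced from the full card text of this record; the second call reads the aggregated "results" config, whose "latest" split is listed in the record's metadata.

```python
from datasets import load_dataset

# Per-task details for one configuration (the call given in the full card).
data = load_dataset("open-llm-leaderboard/details_digitous__Janin-R",
                    "harness_winogrande_5",
                    split="train")

# Aggregated metrics of the newest run ("results" config, "latest" split).
results = load_dataset("open-llm-leaderboard/details_digitous__Janin-R",
                       "results",
                       split="latest")
```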
[ "# Dataset Card for Evaluation run of digitous/Janin-R", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/Janin-R on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-17T03:14:06.115114(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of digitous/Janin-R", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/Janin-R on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-17T03:14:06.115114(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 16, 31, 164, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of digitous/Janin-R## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/Janin-R on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-09-17T03:14:06.115114(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
18c196a5a0a771cd65e2e3e69d089a9e9b3dbf19
# Dataset Card for Evaluation run of digitous/13B-HyperMantis

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/digitous/13B-HyperMantis
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [digitous/13B-HyperMantis](https://huggingface.co/digitous/13B-HyperMantis) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_digitous__13B-HyperMantis",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-16T02:31:52.338573](https://huggingface.co/datasets/open-llm-leaderboard/details_digitous__13B-HyperMantis/blob/main/results_2023-10-16T02-31-52.338573.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.12804110738255034,
        "em_stderr": 0.0034218610287585043,
        "f1": 0.195454068791946,
        "f1_stderr": 0.0035590395888605362,
        "acc": 0.4331487524032521,
        "acc_stderr": 0.010182460567363586
    },
    "harness|drop|3": {
        "em": 0.12804110738255034,
        "em_stderr": 0.0034218610287585043,
        "f1": 0.195454068791946,
        "f1_stderr": 0.0035590395888605362
    },
    "harness|gsm8k|5": {
        "acc": 0.10386656557998483,
        "acc_stderr": 0.008403622228924032
    },
    "harness|winogrande|5": {
        "acc": 0.7624309392265194,
        "acc_stderr": 0.01196129890580314
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
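Beyond the per-task detail configurations, the aggregated metrics can also be pulled from the "results" configuration mentioned above. The snippet below is a minimal sketch added for illustration (it is not part of the original card): the configuration name "results" and the "latest" split are documented in the metadata, but the exact column layout of the rows is an assumption and may vary between harness versions.

```python
from datasets import load_dataset

# Load the aggregated "results" configuration of this evaluation dataset.
# The "latest" split always points at the most recent run (here 2023-10-16),
# while each timestamped split keeps the results of one specific run.
results = load_dataset(
    "open-llm-leaderboard/details_digitous__13B-HyperMantis",
    "results",
    split="latest",
)

# Inspect the available columns and the first row of aggregated metrics;
# the printed values should mirror the JSON shown in the latest-results section.
print(results.column_names)
print(results[0])
```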
open-llm-leaderboard/details_digitous__13B-HyperMantis
[ "region:us" ]
2023-08-18T10:05:55+00:00
{"pretty_name": "Evaluation run of digitous/13B-HyperMantis", "dataset_summary": "Dataset automatically created during the evaluation run of model [digitous/13B-HyperMantis](https://huggingface.co/digitous/13B-HyperMantis) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_digitous__13B-HyperMantis\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-16T02:31:52.338573](https://huggingface.co/datasets/open-llm-leaderboard/details_digitous__13B-HyperMantis/blob/main/results_2023-10-16T02-31-52.338573.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.12804110738255034,\n \"em_stderr\": 0.0034218610287585043,\n \"f1\": 0.195454068791946,\n \"f1_stderr\": 0.0035590395888605362,\n \"acc\": 0.4331487524032521,\n \"acc_stderr\": 0.010182460567363586\n },\n \"harness|drop|3\": {\n \"em\": 0.12804110738255034,\n \"em_stderr\": 0.0034218610287585043,\n \"f1\": 0.195454068791946,\n \"f1_stderr\": 0.0035590395888605362\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.10386656557998483,\n \"acc_stderr\": 0.008403622228924032\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7624309392265194,\n \"acc_stderr\": 0.01196129890580314\n }\n}\n```", "repo_url": "https://huggingface.co/digitous/13B-HyperMantis", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|arc:challenge|25_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_16T02_31_52.338573", "path": ["**/details_harness|drop|3_2023-10-16T02-31-52.338573.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-16T02-31-52.338573.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_16T02_31_52.338573", "path": ["**/details_harness|gsm8k|5_2023-10-16T02-31-52.338573.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-16T02-31-52.338573.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hellaswag|10_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": 
"2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:30:10.108453.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:30:10.108453.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T19:30:10.108453.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T19:30:10.108453.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T19:30:10.108453.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_16T02_31_52.338573", "path": ["**/details_harness|winogrande|5_2023-10-16T02-31-52.338573.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-16T02-31-52.338573.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T19_30_10.108453", "path": ["results_2023-07-19T19:30:10.108453.parquet"]}, {"split": "2023_10_16T02_31_52.338573", "path": ["results_2023-10-16T02-31-52.338573.parquet"]}, {"split": "latest", "path": ["results_2023-10-16T02-31-52.338573.parquet"]}]}]}
2023-10-16T01:32:04+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of digitous/13B-HyperMantis ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model digitous/13B-HyperMantis on the Open LLM Leaderboard. The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-16T02:31:52.338573(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of digitous/13B-HyperMantis", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/13B-HyperMantis on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-16T02:31:52.338573(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of digitous/13B-HyperMantis", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/13B-HyperMantis on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-16T02:31:52.338573(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 18, 31, 166, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of digitous/13B-HyperMantis## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/13B-HyperMantis on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-16T02:31:52.338573(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
fc5eb1270c3ef326161dba96e5278fec63ef9712
# Dataset Card for Evaluation run of digitous/Alpacino13b

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/digitous/Alpacino13b
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [digitous/Alpacino13b](https://huggingface.co/digitous/Alpacino13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_digitous__Alpacino13b",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-15T02:53:57.964177](https://huggingface.co/datasets/open-llm-leaderboard/details_digitous__Alpacino13b/blob/main/results_2023-10-15T02-53-57.964177.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.002726510067114094,
        "em_stderr": 0.0005340111700415912,
        "f1": 0.060902894295302074,
        "f1_stderr": 0.0013623642919700313,
        "acc": 0.4245700475055545,
        "acc_stderr": 0.009645898251756477
    },
    "harness|drop|3": {
        "em": 0.002726510067114094,
        "em_stderr": 0.0005340111700415912,
        "f1": 0.060902894295302074,
        "f1_stderr": 0.0013623642919700313
    },
    "harness|gsm8k|5": {
        "acc": 0.07960576194086429,
        "acc_stderr": 0.007455924338676276
    },
    "harness|winogrande|5": {
        "acc": 0.7695343330702447,
        "acc_stderr": 0.011835872164836676
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
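As with the other leaderboard detail datasets, each per-task configuration keeps one split per run plus a "latest" alias. The following sketch is added for illustration (it is not part of the original card): the configuration name "harness_drop_3" and its splits are taken from the metadata below, but the per-example record schema is an assumption and may differ between harness versions.

```python
from datasets import load_dataset

# Loading a configuration without specifying a split returns a DatasetDict
# whose keys are the run timestamps plus the "latest" alias.
drop_details = load_dataset(
    "open-llm-leaderboard/details_digitous__Alpacino13b",
    "harness_drop_3",
)

# Show which runs are available, then look at a single per-example record
# from the most recent evaluation run.
print(list(drop_details.keys()))
print(drop_details["latest"][0])
```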
open-llm-leaderboard/details_digitous__Alpacino13b
[ "region:us" ]
2023-08-18T10:06:04+00:00
{"pretty_name": "Evaluation run of digitous/Alpacino13b", "dataset_summary": "Dataset automatically created during the evaluation run of model [digitous/Alpacino13b](https://huggingface.co/digitous/Alpacino13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_digitous__Alpacino13b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-15T02:53:57.964177](https://huggingface.co/datasets/open-llm-leaderboard/details_digitous__Alpacino13b/blob/main/results_2023-10-15T02-53-57.964177.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.002726510067114094,\n \"em_stderr\": 0.0005340111700415912,\n \"f1\": 0.060902894295302074,\n \"f1_stderr\": 0.0013623642919700313,\n \"acc\": 0.4245700475055545,\n \"acc_stderr\": 0.009645898251756477\n },\n \"harness|drop|3\": {\n \"em\": 0.002726510067114094,\n \"em_stderr\": 0.0005340111700415912,\n \"f1\": 0.060902894295302074,\n \"f1_stderr\": 0.0013623642919700313\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.07960576194086429,\n \"acc_stderr\": 0.007455924338676276\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7695343330702447,\n \"acc_stderr\": 0.011835872164836676\n }\n}\n```", "repo_url": "https://huggingface.co/digitous/Alpacino13b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|arc:challenge|25_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_15T02_53_57.964177", "path": ["**/details_harness|drop|3_2023-10-15T02-53-57.964177.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-15T02-53-57.964177.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_15T02_53_57.964177", "path": ["**/details_harness|gsm8k|5_2023-10-15T02-53-57.964177.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-15T02-53-57.964177.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hellaswag|10_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": 
"2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:38:18.713837.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:38:18.713837.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T19:38:18.713837.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T19:38:18.713837.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T19:38:18.713837.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_15T02_53_57.964177", "path": ["**/details_harness|winogrande|5_2023-10-15T02-53-57.964177.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-15T02-53-57.964177.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T19_38_18.713837", "path": ["results_2023-07-19T19:38:18.713837.parquet"]}, {"split": "2023_10_15T02_53_57.964177", "path": ["results_2023-10-15T02-53-57.964177.parquet"]}, {"split": "latest", "path": ["results_2023-10-15T02-53-57.964177.parquet"]}]}]}
2023-10-15T01:54:10+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of digitous/Alpacino13b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model digitous/Alpacino13b on the Open LLM Leaderboard. The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-15T02:53:57.964177(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of digitous/Alpacino13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/Alpacino13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T02:53:57.964177(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of digitous/Alpacino13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/Alpacino13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T02:53:57.964177(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 17, 31, 165, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of digitous/Alpacino13b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/Alpacino13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-15T02:53:57.964177(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
2203dac77df9cc9af0044a320f0647d98f9f1a21
# Dataset Card for Evaluation run of digitous/Adventien-GPTJ

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/digitous/Adventien-GPTJ
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [digitous/Adventien-GPTJ](https://huggingface.co/digitous/Adventien-GPTJ) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_digitous__Adventien-GPTJ",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-09-22T18:30:15.376611](https://huggingface.co/datasets/open-llm-leaderboard/details_digitous__Adventien-GPTJ/blob/main/results_2023-09-22T18-30-15.376611.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.0008389261744966443,
        "em_stderr": 0.0002964962989801232,
        "f1": 0.04690331375838923,
        "f1_stderr": 0.0011372681519599575,
        "acc": 0.309065548569777,
        "acc_stderr": 0.008601781393069023
    },
    "harness|drop|3": {
        "em": 0.0008389261744966443,
        "em_stderr": 0.0002964962989801232,
        "f1": 0.04690331375838923,
        "f1_stderr": 0.0011372681519599575
    },
    "harness|gsm8k|5": {
        "acc": 0.01592115238817286,
        "acc_stderr": 0.003447819272389025
    },
    "harness|winogrande|5": {
        "acc": 0.6022099447513812,
        "acc_stderr": 0.013755743513749022
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
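To look at the aggregated metrics rather than the per-sample details, the same loading pattern can point at the "results" configuration and its "latest" split. This is a minimal sketch, not part of the original card: the config name "results" and the split name "latest" are taken from this repo's file configuration listed in the metadata, and inspecting the first row with `results[0]` is only an illustrative way to view the stored summary.

```python
from datasets import load_dataset

# Aggregated results of the evaluation runs; the "latest" split points to
# the most recent results file in this configuration.
results = load_dataset("open-llm-leaderboard/details_digitous__Adventien-GPTJ",
	"results",
	split="latest")
print(results[0])  # inspect the first row of the aggregated results
```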
open-llm-leaderboard/details_digitous__Adventien-GPTJ
[ "region:us" ]
2023-08-18T10:06:13+00:00
{"pretty_name": "Evaluation run of digitous/Adventien-GPTJ", "dataset_summary": "Dataset automatically created during the evaluation run of model [digitous/Adventien-GPTJ](https://huggingface.co/digitous/Adventien-GPTJ) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_digitous__Adventien-GPTJ\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-09-22T18:30:15.376611](https://huggingface.co/datasets/open-llm-leaderboard/details_digitous__Adventien-GPTJ/blob/main/results_2023-09-22T18-30-15.376611.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0008389261744966443,\n \"em_stderr\": 0.0002964962989801232,\n \"f1\": 0.04690331375838923,\n \"f1_stderr\": 0.0011372681519599575,\n \"acc\": 0.309065548569777,\n \"acc_stderr\": 0.008601781393069023\n },\n \"harness|drop|3\": {\n \"em\": 0.0008389261744966443,\n \"em_stderr\": 0.0002964962989801232,\n \"f1\": 0.04690331375838923,\n \"f1_stderr\": 0.0011372681519599575\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.01592115238817286,\n \"acc_stderr\": 0.003447819272389025\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6022099447513812,\n \"acc_stderr\": 0.013755743513749022\n }\n}\n```", "repo_url": "https://huggingface.co/digitous/Adventien-GPTJ", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|arc:challenge|25_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_22T18_30_15.376611", "path": ["**/details_harness|drop|3_2023-09-22T18-30-15.376611.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-22T18-30-15.376611.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_22T18_30_15.376611", "path": ["**/details_harness|gsm8k|5_2023-09-22T18-30-15.376611.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-09-22T18-30-15.376611.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hellaswag|10_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": 
"2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T20:04:02.923110.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T20:04:02.923110.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T20:04:02.923110.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T20:04:02.923110.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T20:04:02.923110.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_22T18_30_15.376611", "path": ["**/details_harness|winogrande|5_2023-09-22T18-30-15.376611.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-22T18-30-15.376611.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T20_04_02.923110", "path": ["results_2023-07-19T20:04:02.923110.parquet"]}, {"split": "2023_09_22T18_30_15.376611", "path": ["results_2023-09-22T18-30-15.376611.parquet"]}, {"split": "latest", "path": ["results_2023-09-22T18-30-15.376611.parquet"]}]}]}
2023-09-22T17:30:27+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of digitous/Adventien-GPTJ ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model digitous/Adventien-GPTJ on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-09-22T18:30:15.376611 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of digitous/Adventien-GPTJ", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/Adventien-GPTJ on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-22T18:30:15.376611(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of digitous/Adventien-GPTJ", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/Adventien-GPTJ on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-22T18:30:15.376611(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 19, 31, 167, 68, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of digitous/Adventien-GPTJ## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/Adventien-GPTJ on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-09-22T18:30:15.376611(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
70629bca1a12f732da696a97d321fc12d0458b78
# Dataset Card for Evaluation run of nthngdy/pythia-owt2-70m-100k

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/nthngdy/pythia-owt2-70m-100k
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [nthngdy/pythia-owt2-70m-100k](https://huggingface.co/nthngdy/pythia-owt2-70m-100k) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_nthngdy__pythia-owt2-70m-100k",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-09-16T16:58:18.157087](https://huggingface.co/datasets/open-llm-leaderboard/details_nthngdy__pythia-owt2-70m-100k/blob/main/results_2023-09-16T16-58-18.157087.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.01960989932885906,
        "em_stderr": 0.00141996222824606,
        "f1": 0.0546665268456376,
        "f1_stderr": 0.0018294405855806455,
        "acc": 0.26637726913970006,
        "acc_stderr": 0.007011150285217067
    },
    "harness|drop|3": {
        "em": 0.01960989932885906,
        "em_stderr": 0.00141996222824606,
        "f1": 0.0546665268456376,
        "f1_stderr": 0.0018294405855806455
    },
    "harness|gsm8k|5": {
        "acc": 0.0,
        "acc_stderr": 0.0
    },
    "harness|winogrande|5": {
        "acc": 0.5327545382794001,
        "acc_stderr": 0.014022300570434134
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
open-llm-leaderboard/details_nthngdy__pythia-owt2-70m-100k
[ "region:us" ]
2023-08-18T10:06:21+00:00
{"pretty_name": "Evaluation run of nthngdy/pythia-owt2-70m-100k", "dataset_summary": "Dataset automatically created during the evaluation run of model [nthngdy/pythia-owt2-70m-100k](https://huggingface.co/nthngdy/pythia-owt2-70m-100k) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_nthngdy__pythia-owt2-70m-100k\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-09-16T16:58:18.157087](https://huggingface.co/datasets/open-llm-leaderboard/details_nthngdy__pythia-owt2-70m-100k/blob/main/results_2023-09-16T16-58-18.157087.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.01960989932885906,\n \"em_stderr\": 0.00141996222824606,\n \"f1\": 0.0546665268456376,\n \"f1_stderr\": 0.0018294405855806455,\n \"acc\": 0.26637726913970006,\n \"acc_stderr\": 0.007011150285217067\n },\n \"harness|drop|3\": {\n \"em\": 0.01960989932885906,\n \"em_stderr\": 0.00141996222824606,\n \"f1\": 0.0546665268456376,\n \"f1_stderr\": 0.0018294405855806455\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5327545382794001,\n \"acc_stderr\": 0.014022300570434134\n }\n}\n```", "repo_url": "https://huggingface.co/nthngdy/pythia-owt2-70m-100k", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|arc:challenge|25_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_16T16_58_18.157087", "path": ["**/details_harness|drop|3_2023-09-16T16-58-18.157087.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-16T16-58-18.157087.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_16T16_58_18.157087", "path": ["**/details_harness|gsm8k|5_2023-09-16T16-58-18.157087.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-09-16T16-58-18.157087.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hellaswag|10_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": 
"2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T13:34:55.847761.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T13:34:55.847761.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T13:34:55.847761.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T13:34:55.847761.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T13:34:55.847761.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_16T16_58_18.157087", "path": ["**/details_harness|winogrande|5_2023-09-16T16-58-18.157087.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-16T16-58-18.157087.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T13_34_55.847761", "path": ["results_2023-07-19T13:34:55.847761.parquet"]}, {"split": "2023_09_16T16_58_18.157087", "path": ["results_2023-09-16T16-58-18.157087.parquet"]}, {"split": "latest", "path": ["results_2023-09-16T16-58-18.157087.parquet"]}]}]}
2023-09-16T15:58:29+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of nthngdy/pythia-owt2-70m-100k ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model nthngdy/pythia-owt2-70m-100k on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-09-16T16:58:18.157087 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
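The loading snippet that originally followed "do the following:" was stripped from this processed text field. A minimal sketch of what it would look like, assuming the repository id follows the same `details_<org>__<model>` naming pattern as the neighbouring records in this dump (the id is not stated on this line, so treat it as an assumption); the `harness_winogrande_5` config and its `latest` split are taken from this record's metadata:

```python
from datasets import load_dataset

# Assumed repo id, inferred from the naming pattern of neighbouring records.
repo = "open-llm-leaderboard/details_nthngdy__pythia-owt2-70m-100k"

# "harness_winogrande_5" is one of the 64 per-task configs listed in the metadata;
# "latest" aliases the newest timestamped split (the card template's own example uses "train").
data = load_dataset(repo, "harness_winogrande_5", split="latest")
print(data)
```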
[ "# Dataset Card for Evaluation run of nthngdy/pythia-owt2-70m-100k", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model nthngdy/pythia-owt2-70m-100k on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-16T16:58:18.157087(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of nthngdy/pythia-owt2-70m-100k", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model nthngdy/pythia-owt2-70m-100k on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-16T16:58:18.157087(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 24, 31, 172, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of nthngdy/pythia-owt2-70m-100k## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model nthngdy/pythia-owt2-70m-100k on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-09-16T16:58:18.157087(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
f2339b56ba51b79d78267fe1213a9710a68b40d4
# Dataset Card for Evaluation run of nthngdy/pythia-owt2-70m-50k ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/nthngdy/pythia-owt2-70m-50k - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [nthngdy/pythia-owt2-70m-50k](https://huggingface.co/nthngdy/pythia-owt2-70m-50k) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_nthngdy__pythia-owt2-70m-50k", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-09-17T20:43:29.834346](https://huggingface.co/datasets/open-llm-leaderboard/details_nthngdy__pythia-owt2-70m-50k/blob/main/results_2023-09-17T20-43-29.834346.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.01164010067114094, "em_stderr": 0.0010984380734033166, "f1": 0.042847105704697984, "f1_stderr": 0.0015153408758494672, "acc": 0.26203630623520124, "acc_stderr": 0.007018094832697564 }, "harness|drop|3": { "em": 0.01164010067114094, "em_stderr": 0.0010984380734033166, "f1": 0.042847105704697984, "f1_stderr": 0.0015153408758494672 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 }, "harness|winogrande|5": { "acc": 0.5240726124704025, "acc_stderr": 0.014036189665395127 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_nthngdy__pythia-owt2-70m-50k
[ "region:us" ]
2023-08-18T10:06:30+00:00
{"pretty_name": "Evaluation run of nthngdy/pythia-owt2-70m-50k", "dataset_summary": "Dataset automatically created during the evaluation run of model [nthngdy/pythia-owt2-70m-50k](https://huggingface.co/nthngdy/pythia-owt2-70m-50k) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_nthngdy__pythia-owt2-70m-50k\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-09-17T20:43:29.834346](https://huggingface.co/datasets/open-llm-leaderboard/details_nthngdy__pythia-owt2-70m-50k/blob/main/results_2023-09-17T20-43-29.834346.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.01164010067114094,\n \"em_stderr\": 0.0010984380734033166,\n \"f1\": 0.042847105704697984,\n \"f1_stderr\": 0.0015153408758494672,\n \"acc\": 0.26203630623520124,\n \"acc_stderr\": 0.007018094832697564\n },\n \"harness|drop|3\": {\n \"em\": 0.01164010067114094,\n \"em_stderr\": 0.0010984380734033166,\n \"f1\": 0.042847105704697984,\n \"f1_stderr\": 0.0015153408758494672\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5240726124704025,\n \"acc_stderr\": 0.014036189665395127\n }\n}\n```", "repo_url": "https://huggingface.co/nthngdy/pythia-owt2-70m-50k", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|arc:challenge|25_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_17T20_43_29.834346", "path": ["**/details_harness|drop|3_2023-09-17T20-43-29.834346.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-17T20-43-29.834346.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_17T20_43_29.834346", "path": ["**/details_harness|gsm8k|5_2023-09-17T20-43-29.834346.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-09-17T20-43-29.834346.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hellaswag|10_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": 
"2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T13:38:15.957411.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T13:38:15.957411.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T13:38:15.957411.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T13:38:15.957411.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T13:38:15.957411.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_17T20_43_29.834346", "path": ["**/details_harness|winogrande|5_2023-09-17T20-43-29.834346.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-17T20-43-29.834346.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T13_38_15.957411", "path": ["results_2023-07-19T13:38:15.957411.parquet"]}, {"split": "2023_09_17T20_43_29.834346", "path": ["results_2023-09-17T20-43-29.834346.parquet"]}, {"split": "latest", "path": ["results_2023-09-17T20-43-29.834346.parquet"]}]}]}
2023-09-17T19:43:41+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of nthngdy/pythia-owt2-70m-50k ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model nthngdy/pythia-owt2-70m-50k on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-09-17T20:43:29.834346 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
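As with the previous record, the code block after "do the following:" was dropped from this processed field; the full card text earlier in this record preserves it, and it is reproduced here for readability:

```python
from datasets import load_dataset

# Same example as in this record's full card text above.
data = load_dataset(
    "open-llm-leaderboard/details_nthngdy__pythia-owt2-70m-50k",
    "harness_winogrande_5",
    split="train",
)
```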
[ "# Dataset Card for Evaluation run of nthngdy/pythia-owt2-70m-50k", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model nthngdy/pythia-owt2-70m-50k on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-17T20:43:29.834346(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of nthngdy/pythia-owt2-70m-50k", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model nthngdy/pythia-owt2-70m-50k on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-17T20:43:29.834346(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 24, 31, 172, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of nthngdy/pythia-owt2-70m-50k## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model nthngdy/pythia-owt2-70m-50k on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-09-17T20:43:29.834346(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
237360649029ae473bebcfb82b01dcd86c9b447e
# Dataset Card for Evaluation run of dfurman/llama-2-70b-dolphin-peft ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/dfurman/llama-2-70b-dolphin-peft - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [dfurman/llama-2-70b-dolphin-peft](https://huggingface.co/dfurman/llama-2-70b-dolphin-peft) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_dfurman__llama-2-70b-dolphin-peft", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-05T00:46:08.934942](https://huggingface.co/datasets/open-llm-leaderboard/details_dfurman__llama-2-70b-dolphin-peft/blob/main/results_2023-10-05T00-46-08.934942.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.002307046979865772, "em_stderr": 0.0004913221265094568, "f1": 0.0702915268456376, "f1_stderr": 0.0014330013107730173, "acc": 0.5563409652980272, "acc_stderr": 0.011305358161874588 }, "harness|drop|3": { "em": 0.002307046979865772, "em_stderr": 0.0004913221265094568, "f1": 0.0702915268456376, "f1_stderr": 0.0014330013107730173 }, "harness|gsm8k|5": { "acc": 0.27369219105382864, "acc_stderr": 0.012281003490963456 }, "harness|winogrande|5": { "acc": 0.8389897395422258, "acc_stderr": 0.01032971283278572 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_dfurman__llama-2-70b-dolphin-peft
[ "region:us" ]
2023-08-18T10:06:39+00:00
{"pretty_name": "Evaluation run of dfurman/llama-2-70b-dolphin-peft", "dataset_summary": "Dataset automatically created during the evaluation run of model [dfurman/llama-2-70b-dolphin-peft](https://huggingface.co/dfurman/llama-2-70b-dolphin-peft) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_dfurman__llama-2-70b-dolphin-peft\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-05T00:46:08.934942](https://huggingface.co/datasets/open-llm-leaderboard/details_dfurman__llama-2-70b-dolphin-peft/blob/main/results_2023-10-05T00-46-08.934942.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.002307046979865772,\n \"em_stderr\": 0.0004913221265094568,\n \"f1\": 0.0702915268456376,\n \"f1_stderr\": 0.0014330013107730173,\n \"acc\": 0.5563409652980272,\n \"acc_stderr\": 0.011305358161874588\n },\n \"harness|drop|3\": {\n \"em\": 0.002307046979865772,\n \"em_stderr\": 0.0004913221265094568,\n \"f1\": 0.0702915268456376,\n \"f1_stderr\": 0.0014330013107730173\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.27369219105382864,\n \"acc_stderr\": 0.012281003490963456\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8389897395422258,\n \"acc_stderr\": 0.01032971283278572\n }\n}\n```", "repo_url": "https://huggingface.co/dfurman/llama-2-70b-dolphin-peft", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|arc:challenge|25_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_05T00_46_08.934942", "path": ["**/details_harness|drop|3_2023-10-05T00-46-08.934942.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-05T00-46-08.934942.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_05T00_46_08.934942", "path": ["**/details_harness|gsm8k|5_2023-10-05T00-46-08.934942.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-05T00-46-08.934942.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hellaswag|10_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-04T21:00:53.208892.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-04T21:00:53.208892.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-04T21:00:53.208892.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-04T21:00:53.208892.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-04T21:00:53.208892.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_05T00_46_08.934942", "path": ["**/details_harness|winogrande|5_2023-10-05T00-46-08.934942.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-05T00-46-08.934942.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_04T21_00_53.208892", "path": ["results_2023-08-04T21:00:53.208892.parquet"]}, {"split": "2023_10_05T00_46_08.934942", "path": ["results_2023-10-05T00-46-08.934942.parquet"]}, {"split": "latest", "path": ["results_2023-10-05T00-46-08.934942.parquet"]}]}]}
2023-10-04T23:46:22+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of dfurman/llama-2-70b-dolphin-peft ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model dfurman/llama-2-70b-dolphin-peft on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-05T00:46:08.934942 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
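A minimal sketch of the loading snippet referenced just above ("you can for instance do the following:"), mirroring the `load_dataset` call stored in this record's metadata; the config `harness_winogrande_5` and the `"train"` split are taken from that metadata rather than invented here:

```python
from datasets import load_dataset

# Per-sample details for one evaluated task of this run; the "train" split
# always points to the latest results, per the card summary.
data = load_dataset(
    "open-llm-leaderboard/details_dfurman__llama-2-70b-dolphin-peft",
    "harness_winogrande_5",
    split="train",
)
```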
[ "# Dataset Card for Evaluation run of dfurman/llama-2-70b-dolphin-peft", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model dfurman/llama-2-70b-dolphin-peft on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-05T00:46:08.934942(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of dfurman/llama-2-70b-dolphin-peft", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model dfurman/llama-2-70b-dolphin-peft on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-05T00:46:08.934942(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 25, 31, 173, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of dfurman/llama-2-70b-dolphin-peft## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model dfurman/llama-2-70b-dolphin-peft on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-05T00:46:08.934942(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
cca332057514a5fe1d3cb728c6008c1679a5c5fd
# Dataset Card for Evaluation run of dfurman/llama-2-13b-dolphin-peft ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/dfurman/llama-2-13b-dolphin-peft - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [dfurman/llama-2-13b-dolphin-peft](https://huggingface.co/dfurman/llama-2-13b-dolphin-peft) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 5 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_dfurman__llama-2-13b-dolphin-peft", "harness_gsm8k_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-03T19:52:57.609518](https://huggingface.co/datasets/open-llm-leaderboard/details_dfurman__llama-2-13b-dolphin-peft/blob/main/results_2023-12-03T19-52-57.609518.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.0, "acc_stderr": 0.0 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
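In addition to the per-task snippet in the card above, a hedged sketch of loading the aggregated "results" configuration mentioned in the summary; the config name `results` and the `latest` split follow the configuration list in these records' metadata and are assumed, not confirmed, to apply to this repository as well:

```python
from datasets import load_dataset

# Aggregated metrics of the most recent evaluation run; "results"/"latest"
# are assumed from the config list in the record metadata.
results = load_dataset(
    "open-llm-leaderboard/details_dfurman__llama-2-13b-dolphin-peft",
    "results",
    split="latest",
)
```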
open-llm-leaderboard/details_dfurman__llama-2-13b-dolphin-peft
[ "region:us" ]
2023-08-18T10:06:48+00:00
{"pretty_name": "Evaluation run of dfurman/llama-2-13b-dolphin-peft", "dataset_summary": "Dataset automatically created during the evaluation run of model [dfurman/llama-2-13b-dolphin-peft](https://huggingface.co/dfurman/llama-2-13b-dolphin-peft) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 5 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_dfurman__llama-2-13b-dolphin-peft\",\n\t\"harness_gsm8k_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-03T19:52:57.609518](https://huggingface.co/datasets/open-llm-leaderboard/details_dfurman__llama-2-13b-dolphin-peft/blob/main/results_2023-12-03T19-52-57.609518.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": "https://huggingface.co/dfurman/llama-2-13b-dolphin-peft", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|arc:challenge|25_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_18T05_24_57.817277", "path": ["**/details_harness|drop|3_2023-09-18T05-24-57.817277.parquet"]}, {"split": "2023_09_22T23_09_33.744166", "path": ["**/details_harness|drop|3_2023-09-22T23-09-33.744166.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-22T23-09-33.744166.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_18T05_24_57.817277", "path": ["**/details_harness|gsm8k|5_2023-09-18T05-24-57.817277.parquet"]}, {"split": "2023_09_22T23_09_33.744166", "path": ["**/details_harness|gsm8k|5_2023-09-22T23-09-33.744166.parquet"]}, {"split": "2023_12_03T19_26_24.364413", "path": ["**/details_harness|gsm8k|5_2023-12-03T19-26-24.364413.parquet"]}, {"split": "2023_12_03T19_52_57.609518", "path": ["**/details_harness|gsm8k|5_2023-12-03T19-52-57.609518.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-03T19-52-57.609518.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hellaswag|10_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", 
"data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-04T15:17:20.418762.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-04T15:17:20.418762.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-04T15:17:20.418762.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-04T15:17:20.418762.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-04T15:17:20.418762.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_18T05_24_57.817277", "path": ["**/details_harness|winogrande|5_2023-09-18T05-24-57.817277.parquet"]}, {"split": "2023_09_22T23_09_33.744166", "path": ["**/details_harness|winogrande|5_2023-09-22T23-09-33.744166.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-22T23-09-33.744166.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_04T15_17_20.418762", "path": ["results_2023-08-04T15:17:20.418762.parquet"]}, {"split": "2023_09_18T05_24_57.817277", "path": ["results_2023-09-18T05-24-57.817277.parquet"]}, {"split": "2023_09_22T23_09_33.744166", "path": ["results_2023-09-22T23-09-33.744166.parquet"]}, {"split": "2023_12_03T19_26_24.364413", "path": ["results_2023-12-03T19-26-24.364413.parquet"]}, {"split": "2023_12_03T19_52_57.609518", "path": ["results_2023-12-03T19-52-57.609518.parquet"]}, {"split": "latest", "path": ["results_2023-12-03T19-52-57.609518.parquet"]}]}]}
2023-12-03T19:53:04+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of dfurman/llama-2-13b-dolphin-peft ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model dfurman/llama-2-13b-dolphin-peft on the Open LLM Leaderboard. The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 5 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-03T19:52:57.609518(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
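The flattened summary above refers to a loading snippet ("you can for instance do the following") that is not reproduced in this plain-text rendering. The sketch below is a reconstruction, not the card's own code: it assumes the details repo follows the usual `open-llm-leaderboard/details_<org>__<model>` naming pattern, and it reuses a config name and the "latest" split that appear in the config listing for this record.

```python
from datasets import load_dataset

# Hypothetical reconstruction of the loading call referenced above.
# Assumptions: the repo id follows the "open-llm-leaderboard/details_<org>__<model>"
# naming pattern; the config and split names ("harness_winogrande_5", "latest")
# are taken from the config listing for this record.
data = load_dataset(
    "open-llm-leaderboard/details_dfurman__llama-2-13b-dolphin-peft",
    "harness_winogrande_5",
    split="latest",  # "latest" maps to the parquet file from the most recent run
)
print(data)  # shows the features and number of rows for this task
```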
[ "# Dataset Card for Evaluation run of dfurman/llama-2-13b-dolphin-peft", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model dfurman/llama-2-13b-dolphin-peft on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 5 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-03T19:52:57.609518(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of dfurman/llama-2-13b-dolphin-peft", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model dfurman/llama-2-13b-dolphin-peft on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 5 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-03T19:52:57.609518(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 25, 31, 174, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of dfurman/llama-2-13b-dolphin-peft## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model dfurman/llama-2-13b-dolphin-peft on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 5 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-03T19:52:57.609518(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
c59716c9070eab7ef488b309c7fb0b01ae797063
# Dataset Card for Evaluation run of dfurman/llama-2-13b-guanaco-peft ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/dfurman/llama-2-13b-guanaco-peft - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [dfurman/llama-2-13b-guanaco-peft](https://huggingface.co/dfurman/llama-2-13b-guanaco-peft) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_dfurman__llama-2-13b-guanaco-peft", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-04T16:30:14.703149](https://huggingface.co/datasets/open-llm-leaderboard/details_dfurman__llama-2-13b-guanaco-peft/blob/main/results_2023-08-04T16%3A30%3A14.703149.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5586096517989027, "acc_stderr": 0.0342928910333228, "acc_norm": 0.5627889461047514, "acc_norm_stderr": 0.034271870197560256, "mc1": 0.29008567931456547, "mc1_stderr": 0.01588623687420952, "mc2": 0.42589160969882056, "mc2_stderr": 0.014357814512818864 }, "harness|arc:challenge|25": { "acc": 0.560580204778157, "acc_stderr": 0.014503747823580122, "acc_norm": 0.5998293515358362, "acc_norm_stderr": 0.014317197787809172 }, "harness|hellaswag|10": { "acc": 0.6170085640310695, "acc_stderr": 0.0048512275270709, "acc_norm": 0.8243377813184625, "acc_norm_stderr": 0.0037975482528516194 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.45185185185185184, "acc_stderr": 0.04299268905480863, "acc_norm": 0.45185185185185184, "acc_norm_stderr": 0.04299268905480863 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5197368421052632, "acc_stderr": 0.04065771002562605, "acc_norm": 0.5197368421052632, "acc_norm_stderr": 0.04065771002562605 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6075471698113207, "acc_stderr": 0.03005258057955785, "acc_norm": 0.6075471698113207, "acc_norm_stderr": 0.03005258057955785 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5902777777777778, "acc_stderr": 0.04112490974670787, "acc_norm": 0.5902777777777778, "acc_norm_stderr": 0.04112490974670787 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.44, "acc_stderr": 0.049888765156985884,
"acc_norm": 0.44, "acc_norm_stderr": 0.049888765156985884 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.4913294797687861, "acc_stderr": 0.038118909889404126, "acc_norm": 0.4913294797687861, "acc_norm_stderr": 0.038118909889404126 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.2549019607843137, "acc_stderr": 0.04336432707993179, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.04336432707993179 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.42127659574468085, "acc_stderr": 0.032278345101462685, "acc_norm": 0.42127659574468085, "acc_norm_stderr": 0.032278345101462685 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.30701754385964913, "acc_stderr": 0.0433913832257986, "acc_norm": 0.30701754385964913, "acc_norm_stderr": 0.0433913832257986 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5103448275862069, "acc_stderr": 0.04165774775728762, "acc_norm": 0.5103448275862069, "acc_norm_stderr": 0.04165774775728762 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.35185185185185186, "acc_stderr": 0.024594975128920938, "acc_norm": 0.35185185185185186, "acc_norm_stderr": 0.024594975128920938 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3253968253968254, "acc_stderr": 0.041905964388711366, "acc_norm": 0.3253968253968254, "acc_norm_stderr": 0.041905964388711366 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6774193548387096, "acc_stderr": 0.026593084516572267, "acc_norm": 0.6774193548387096, "acc_norm_stderr": 0.026593084516572267 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.43349753694581283, "acc_stderr": 0.03486731727419872, "acc_norm": 0.43349753694581283, "acc_norm_stderr": 0.03486731727419872 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.59, "acc_stderr": 0.049431107042371025, "acc_norm": 0.59, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6666666666666666, "acc_stderr": 0.0368105086916155, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.0368105086916155 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7070707070707071, "acc_stderr": 0.03242497958178815, "acc_norm": 0.7070707070707071, "acc_norm_stderr": 0.03242497958178815 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8341968911917098, "acc_stderr": 0.026839845022314415, "acc_norm": 0.8341968911917098, "acc_norm_stderr": 0.026839845022314415 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5102564102564102, "acc_stderr": 0.025345672221942374, "acc_norm": 0.5102564102564102, "acc_norm_stderr": 0.025345672221942374 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3037037037037037, "acc_stderr": 0.028037929969114986, "acc_norm": 0.3037037037037037, "acc_norm_stderr": 0.028037929969114986 }, "harness|hendrycksTest-high_school_microeconomics|5": { 
"acc": 0.5588235294117647, "acc_stderr": 0.032252942323996406, "acc_norm": 0.5588235294117647, "acc_norm_stderr": 0.032252942323996406 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.36423841059602646, "acc_stderr": 0.03929111781242742, "acc_norm": 0.36423841059602646, "acc_norm_stderr": 0.03929111781242742 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7596330275229358, "acc_stderr": 0.01832060732096407, "acc_norm": 0.7596330275229358, "acc_norm_stderr": 0.01832060732096407 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4212962962962963, "acc_stderr": 0.03367462138896078, "acc_norm": 0.4212962962962963, "acc_norm_stderr": 0.03367462138896078 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7647058823529411, "acc_stderr": 0.029771775228145638, "acc_norm": 0.7647058823529411, "acc_norm_stderr": 0.029771775228145638 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7510548523206751, "acc_stderr": 0.028146970599422644, "acc_norm": 0.7510548523206751, "acc_norm_stderr": 0.028146970599422644 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6367713004484304, "acc_stderr": 0.03227790442850499, "acc_norm": 0.6367713004484304, "acc_norm_stderr": 0.03227790442850499 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6335877862595419, "acc_stderr": 0.04225875451969638, "acc_norm": 0.6335877862595419, "acc_norm_stderr": 0.04225875451969638 }, "harness|hendrycksTest-international_law|5": { "acc": 0.71900826446281, "acc_stderr": 0.041032038305145124, "acc_norm": 0.71900826446281, "acc_norm_stderr": 0.041032038305145124 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7129629629629629, "acc_stderr": 0.043733130409147614, "acc_norm": 0.7129629629629629, "acc_norm_stderr": 0.043733130409147614 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6687116564417178, "acc_stderr": 0.03697983910025588, "acc_norm": 0.6687116564417178, "acc_norm_stderr": 0.03697983910025588 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.23214285714285715, "acc_stderr": 0.04007341809755805, "acc_norm": 0.23214285714285715, "acc_norm_stderr": 0.04007341809755805 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7948717948717948, "acc_stderr": 0.02645350805404033, "acc_norm": 0.7948717948717948, "acc_norm_stderr": 0.02645350805404033 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7509578544061303, "acc_stderr": 0.015464676163395958, "acc_norm": 0.7509578544061303, "acc_norm_stderr": 0.015464676163395958 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.638728323699422, "acc_stderr": 0.025862201852277895, "acc_norm": 0.638728323699422, "acc_norm_stderr": 0.025862201852277895 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.40670391061452515, "acc_stderr": 0.016428811915898865, "acc_norm": 0.40670391061452515, "acc_norm_stderr": 0.016428811915898865 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6372549019607843, "acc_stderr": 0.027530078447110307, "acc_norm": 0.6372549019607843, "acc_norm_stderr": 0.027530078447110307 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6366559485530546, "acc_stderr": 0.02731684767419271, "acc_norm": 0.6366559485530546, "acc_norm_stderr": 
0.02731684767419271 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6327160493827161, "acc_stderr": 0.026822801759507887, "acc_norm": 0.6327160493827161, "acc_norm_stderr": 0.026822801759507887 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4078014184397163, "acc_stderr": 0.029316011776343555, "acc_norm": 0.4078014184397163, "acc_norm_stderr": 0.029316011776343555 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4172099087353325, "acc_stderr": 0.01259395999290642, "acc_norm": 0.4172099087353325, "acc_norm_stderr": 0.01259395999290642 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5441176470588235, "acc_stderr": 0.030254372573976715, "acc_norm": 0.5441176470588235, "acc_norm_stderr": 0.030254372573976715 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5359477124183006, "acc_stderr": 0.02017548876548404, "acc_norm": 0.5359477124183006, "acc_norm_stderr": 0.02017548876548404 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6454545454545455, "acc_stderr": 0.045820048415054174, "acc_norm": 0.6454545454545455, "acc_norm_stderr": 0.045820048415054174 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6326530612244898, "acc_stderr": 0.030862144921087555, "acc_norm": 0.6326530612244898, "acc_norm_stderr": 0.030862144921087555 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7512437810945274, "acc_stderr": 0.030567675938916714, "acc_norm": 0.7512437810945274, "acc_norm_stderr": 0.030567675938916714 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.82, "acc_stderr": 0.03861229196653693, "acc_norm": 0.82, "acc_norm_stderr": 0.03861229196653693 }, "harness|hendrycksTest-virology|5": { "acc": 0.42771084337349397, "acc_stderr": 0.038515976837185335, "acc_norm": 0.42771084337349397, "acc_norm_stderr": 0.038515976837185335 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7660818713450293, "acc_stderr": 0.03246721765117826, "acc_norm": 0.7660818713450293, "acc_norm_stderr": 0.03246721765117826 }, "harness|truthfulqa:mc|0": { "mc1": 0.29008567931456547, "mc1_stderr": 0.01588623687420952, "mc2": 0.42589160969882056, "mc2_stderr": 0.014357814512818864 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
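The loading snippet in the card above pulls a single config ("harness_truthfulqa_mc_0"). As a small usage sketch, and assuming the repo is publicly reachable on the Hub, the per-subject details can be loaded the same way with any config name and the "latest" split listed in the metadata below; for example, the college-biology details behind the acc ≈ 0.590 figure reported in the results block:

```python
from datasets import load_dataset

# Usage sketch (assumption: the config/split names below, taken from the
# metadata listing for this record, are available on the Hub).
# "latest" points at the parquet file from the most recent run.
details = load_dataset(
    "open-llm-leaderboard/details_dfurman__llama-2-13b-guanaco-peft",
    "harness_hendrycksTest_college_biology_5",
    split="latest",
)
print(details)               # shows the features and number of rows for this task
print(details.column_names)  # per-example fields recorded by the evaluation harness
```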
open-llm-leaderboard/details_dfurman__llama-2-13b-guanaco-peft
[ "region:us" ]
2023-08-18T10:06:56+00:00
{"pretty_name": "Evaluation run of dfurman/llama-2-13b-guanaco-peft", "dataset_summary": "Dataset automatically created during the evaluation run of model [dfurman/llama-2-13b-guanaco-peft](https://huggingface.co/dfurman/llama-2-13b-guanaco-peft) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_dfurman__llama-2-13b-guanaco-peft\",\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-08-04T16:30:14.703149](https://huggingface.co/datasets/open-llm-leaderboard/details_dfurman__llama-2-13b-guanaco-peft/blob/main/results_2023-08-04T16%3A30%3A14.703149.json) (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5586096517989027,\n \"acc_stderr\": 0.0342928910333228,\n \"acc_norm\": 0.5627889461047514,\n \"acc_norm_stderr\": 0.034271870197560256,\n \"mc1\": 0.29008567931456547,\n \"mc1_stderr\": 0.01588623687420952,\n \"mc2\": 0.42589160969882056,\n \"mc2_stderr\": 0.014357814512818864\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.560580204778157,\n \"acc_stderr\": 0.014503747823580122,\n \"acc_norm\": 0.5998293515358362,\n \"acc_norm_stderr\": 0.014317197787809172\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6170085640310695,\n \"acc_stderr\": 0.0048512275270709,\n \"acc_norm\": 0.8243377813184625,\n \"acc_norm_stderr\": 0.0037975482528516194\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.45185185185185184,\n \"acc_stderr\": 0.04299268905480863,\n \"acc_norm\": 0.45185185185185184,\n \"acc_norm_stderr\": 0.04299268905480863\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.5197368421052632,\n \"acc_stderr\": 0.04065771002562605,\n \"acc_norm\": 0.5197368421052632,\n \"acc_norm_stderr\": 0.04065771002562605\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6075471698113207,\n \"acc_stderr\": 0.03005258057955785,\n \"acc_norm\": 0.6075471698113207,\n \"acc_norm_stderr\": 0.03005258057955785\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5902777777777778,\n \"acc_stderr\": 0.04112490974670787,\n \"acc_norm\": 0.5902777777777778,\n \"acc_norm_stderr\": 0.04112490974670787\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 
0.049888765156985884,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.049888765156985884\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.4913294797687861,\n \"acc_stderr\": 0.038118909889404126,\n \"acc_norm\": 0.4913294797687861,\n \"acc_norm_stderr\": 0.038118909889404126\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.2549019607843137,\n \"acc_stderr\": 0.04336432707993179,\n \"acc_norm\": 0.2549019607843137,\n \"acc_norm_stderr\": 0.04336432707993179\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.42127659574468085,\n \"acc_stderr\": 0.032278345101462685,\n \"acc_norm\": 0.42127659574468085,\n \"acc_norm_stderr\": 0.032278345101462685\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.30701754385964913,\n \"acc_stderr\": 0.0433913832257986,\n \"acc_norm\": 0.30701754385964913,\n \"acc_norm_stderr\": 0.0433913832257986\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5103448275862069,\n \"acc_stderr\": 0.04165774775728762,\n \"acc_norm\": 0.5103448275862069,\n \"acc_norm_stderr\": 0.04165774775728762\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.35185185185185186,\n \"acc_stderr\": 0.024594975128920938,\n \"acc_norm\": 0.35185185185185186,\n \"acc_norm_stderr\": 0.024594975128920938\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3253968253968254,\n \"acc_stderr\": 0.041905964388711366,\n \"acc_norm\": 0.3253968253968254,\n \"acc_norm_stderr\": 0.041905964388711366\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6774193548387096,\n \"acc_stderr\": 0.026593084516572267,\n \"acc_norm\": 0.6774193548387096,\n \"acc_norm_stderr\": 0.026593084516572267\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.43349753694581283,\n \"acc_stderr\": 0.03486731727419872,\n \"acc_norm\": 0.43349753694581283,\n \"acc_norm_stderr\": 0.03486731727419872\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.59,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.0368105086916155,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.0368105086916155\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7070707070707071,\n \"acc_stderr\": 0.03242497958178815,\n \"acc_norm\": 0.7070707070707071,\n \"acc_norm_stderr\": 0.03242497958178815\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8341968911917098,\n \"acc_stderr\": 0.026839845022314415,\n \"acc_norm\": 0.8341968911917098,\n \"acc_norm_stderr\": 0.026839845022314415\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5102564102564102,\n \"acc_stderr\": 0.025345672221942374,\n \"acc_norm\": 0.5102564102564102,\n \"acc_norm_stderr\": 0.025345672221942374\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3037037037037037,\n \"acc_stderr\": 0.028037929969114986,\n \"acc_norm\": 0.3037037037037037,\n \"acc_norm_stderr\": 0.028037929969114986\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.5588235294117647,\n \"acc_stderr\": 0.032252942323996406,\n \"acc_norm\": 0.5588235294117647,\n \"acc_norm_stderr\": 0.032252942323996406\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.36423841059602646,\n \"acc_stderr\": 0.03929111781242742,\n \"acc_norm\": 0.36423841059602646,\n \"acc_norm_stderr\": 0.03929111781242742\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7596330275229358,\n \"acc_stderr\": 0.01832060732096407,\n \"acc_norm\": 0.7596330275229358,\n \"acc_norm_stderr\": 0.01832060732096407\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4212962962962963,\n \"acc_stderr\": 0.03367462138896078,\n \"acc_norm\": 0.4212962962962963,\n \"acc_norm_stderr\": 0.03367462138896078\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7647058823529411,\n \"acc_stderr\": 0.029771775228145638,\n \"acc_norm\": 0.7647058823529411,\n \"acc_norm_stderr\": 0.029771775228145638\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7510548523206751,\n \"acc_stderr\": 0.028146970599422644,\n \"acc_norm\": 0.7510548523206751,\n \"acc_norm_stderr\": 0.028146970599422644\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6367713004484304,\n \"acc_stderr\": 0.03227790442850499,\n \"acc_norm\": 0.6367713004484304,\n \"acc_norm_stderr\": 0.03227790442850499\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6335877862595419,\n \"acc_stderr\": 0.04225875451969638,\n \"acc_norm\": 0.6335877862595419,\n \"acc_norm_stderr\": 0.04225875451969638\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.71900826446281,\n \"acc_stderr\": 0.041032038305145124,\n \"acc_norm\": 0.71900826446281,\n \"acc_norm_stderr\": 0.041032038305145124\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7129629629629629,\n \"acc_stderr\": 0.043733130409147614,\n \"acc_norm\": 0.7129629629629629,\n \"acc_norm_stderr\": 0.043733130409147614\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6687116564417178,\n \"acc_stderr\": 0.03697983910025588,\n \"acc_norm\": 0.6687116564417178,\n \"acc_norm_stderr\": 0.03697983910025588\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.23214285714285715,\n \"acc_stderr\": 0.04007341809755805,\n \"acc_norm\": 0.23214285714285715,\n \"acc_norm_stderr\": 0.04007341809755805\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7948717948717948,\n \"acc_stderr\": 0.02645350805404033,\n \"acc_norm\": 0.7948717948717948,\n \"acc_norm_stderr\": 0.02645350805404033\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.58,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.7509578544061303,\n \"acc_stderr\": 0.015464676163395958,\n \"acc_norm\": 0.7509578544061303,\n \"acc_norm_stderr\": 0.015464676163395958\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.638728323699422,\n \"acc_stderr\": 0.025862201852277895,\n \"acc_norm\": 0.638728323699422,\n \"acc_norm_stderr\": 0.025862201852277895\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.40670391061452515,\n \"acc_stderr\": 0.016428811915898865,\n \"acc_norm\": 0.40670391061452515,\n \"acc_norm_stderr\": 0.016428811915898865\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6372549019607843,\n \"acc_stderr\": 0.027530078447110307,\n \"acc_norm\": 0.6372549019607843,\n \"acc_norm_stderr\": 0.027530078447110307\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6366559485530546,\n \"acc_stderr\": 0.02731684767419271,\n \"acc_norm\": 0.6366559485530546,\n \"acc_norm_stderr\": 0.02731684767419271\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6327160493827161,\n \"acc_stderr\": 0.026822801759507887,\n \"acc_norm\": 0.6327160493827161,\n \"acc_norm_stderr\": 0.026822801759507887\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4078014184397163,\n \"acc_stderr\": 0.029316011776343555,\n \"acc_norm\": 0.4078014184397163,\n \"acc_norm_stderr\": 0.029316011776343555\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4172099087353325,\n \"acc_stderr\": 0.01259395999290642,\n \"acc_norm\": 0.4172099087353325,\n \"acc_norm_stderr\": 0.01259395999290642\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5441176470588235,\n \"acc_stderr\": 0.030254372573976715,\n \"acc_norm\": 0.5441176470588235,\n \"acc_norm_stderr\": 0.030254372573976715\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.5359477124183006,\n \"acc_stderr\": 0.02017548876548404,\n \"acc_norm\": 0.5359477124183006,\n \"acc_norm_stderr\": 0.02017548876548404\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6454545454545455,\n \"acc_stderr\": 0.045820048415054174,\n \"acc_norm\": 0.6454545454545455,\n \"acc_norm_stderr\": 0.045820048415054174\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6326530612244898,\n \"acc_stderr\": 0.030862144921087555,\n \"acc_norm\": 0.6326530612244898,\n \"acc_norm_stderr\": 0.030862144921087555\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7512437810945274,\n \"acc_stderr\": 0.030567675938916714,\n \"acc_norm\": 0.7512437810945274,\n \"acc_norm_stderr\": 0.030567675938916714\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.82,\n \"acc_stderr\": 0.03861229196653693,\n \"acc_norm\": 0.82,\n \"acc_norm_stderr\": 0.03861229196653693\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.42771084337349397,\n \"acc_stderr\": 0.038515976837185335,\n \"acc_norm\": 0.42771084337349397,\n \"acc_norm_stderr\": 0.038515976837185335\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7660818713450293,\n \"acc_stderr\": 0.03246721765117826,\n \"acc_norm\": 0.7660818713450293,\n \"acc_norm_stderr\": 0.03246721765117826\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.29008567931456547,\n \"mc1_stderr\": 0.01588623687420952,\n \"mc2\": 0.42589160969882056,\n \"mc2_stderr\": 0.014357814512818864\n }\n}\n```", "repo_url": "https://huggingface.co/dfurman/llama-2-13b-guanaco-peft", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": 
"[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|arc:challenge|25_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hellaswag|10_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-04T16:30:14.703149.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-04T16:30:14.703149.parquet", 
"**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-04T16:30:14.703149.parquet", 
"**/details_harness|hendrycksTest-professional_psychology|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-04T16:30:14.703149.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-04T16:30:14.703149.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_04T16_30_14.703149", "path": ["results_2023-08-04T16:30:14.703149.parquet"]}, {"split": "latest", "path": ["results_2023-08-04T16:30:14.703149.parquet"]}]}]}
2023-08-27T11:30:18+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of dfurman/llama-2-13b-guanaco-peft ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model dfurman/llama-2-13b-guanaco-peft on the Open LLM Leaderboard. The dataset is composed of 61 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-08-04T16:30:14.703149 (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of dfurman/llama-2-13b-guanaco-peft", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model dfurman/llama-2-13b-guanaco-peft on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-08-04T16:30:14.703149 (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of dfurman/llama-2-13b-guanaco-peft", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model dfurman/llama-2-13b-guanaco-peft on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-08-04T16:30:14.703149 (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 25, 31, 173, 68, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of dfurman/llama-2-13b-guanaco-peft## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model dfurman/llama-2-13b-guanaco-peft on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-08-04T16:30:14.703149 (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
a82dc603fb0355dd1e67f504f028b3c8275626c8
# Dataset Card for Evaluation run of lmsys/vicuna-7b-v1.3

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/lmsys/vicuna-7b-v1.3
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [lmsys/vicuna-7b-v1.3](https://huggingface.co/lmsys/vicuna-7b-v1.3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_lmsys__vicuna-7b-v1.3",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-21T23:45:21.646720](https://huggingface.co/datasets/open-llm-leaderboard/details_lmsys__vicuna-7b-v1.3/blob/main/results_2023-10-21T23-45-21.646720.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each one in the results and in the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.12730704697986578,
        "em_stderr": 0.003413474068983651,
        "f1": 0.17891254194630765,
        "f1_stderr": 0.0035073277688968674,
        "acc": 0.38083789051163464,
        "acc_stderr": 0.0095991004919272
    },
    "harness|drop|3": {
        "em": 0.12730704697986578,
        "em_stderr": 0.003413474068983651,
        "f1": 0.17891254194630765,
        "f1_stderr": 0.0035073277688968674
    },
    "harness|gsm8k|5": {
        "acc": 0.05686125852918878,
        "acc_stderr": 0.0063787902420996325
    },
    "harness|winogrande|5": {
        "acc": 0.7048145224940805,
        "acc_stderr": 0.012819410741754765
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
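As a minimal sketch of reading the aggregated metrics mentioned above (this assumes the "results" configuration exposes the same timestamped splits plus a "latest" alias, as the per-task configurations listed in the metadata do):

```python
from datasets import load_dataset

# Load the aggregated "results" configuration; the "latest" split points to the
# most recent evaluation run for this model.
results = load_dataset(
    "open-llm-leaderboard/details_lmsys__vicuna-7b-v1.3",
    "results",
    split="latest",
)

# Each row holds the serialized metrics for one run; inspect the first one.
print(results[0])
```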
open-llm-leaderboard/details_lmsys__vicuna-7b-v1.3
[ "region:us" ]
2023-08-18T10:07:05+00:00
{"pretty_name": "Evaluation run of lmsys/vicuna-7b-v1.3", "dataset_summary": "Dataset automatically created during the evaluation run of model [lmsys/vicuna-7b-v1.3](https://huggingface.co/lmsys/vicuna-7b-v1.3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_lmsys__vicuna-7b-v1.3\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-21T23:45:21.646720](https://huggingface.co/datasets/open-llm-leaderboard/details_lmsys__vicuna-7b-v1.3/blob/main/results_2023-10-21T23-45-21.646720.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.12730704697986578,\n \"em_stderr\": 0.003413474068983651,\n \"f1\": 0.17891254194630765,\n \"f1_stderr\": 0.0035073277688968674,\n \"acc\": 0.38083789051163464,\n \"acc_stderr\": 0.0095991004919272\n },\n \"harness|drop|3\": {\n \"em\": 0.12730704697986578,\n \"em_stderr\": 0.003413474068983651,\n \"f1\": 0.17891254194630765,\n \"f1_stderr\": 0.0035073277688968674\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.05686125852918878,\n \"acc_stderr\": 0.0063787902420996325\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7048145224940805,\n \"acc_stderr\": 0.012819410741754765\n }\n}\n```", "repo_url": "https://huggingface.co/lmsys/vicuna-7b-v1.3", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|arc:challenge|25_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_21T23_45_21.646720", "path": ["**/details_harness|drop|3_2023-10-21T23-45-21.646720.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-21T23-45-21.646720.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_21T23_45_21.646720", "path": ["**/details_harness|gsm8k|5_2023-10-21T23-45-21.646720.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-21T23-45-21.646720.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hellaswag|10_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": 
"2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T16:22:02.219224.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T16:22:02.219224.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T16:22:02.219224.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T16:22:02.219224.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T16:22:02.219224.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_21T23_45_21.646720", "path": ["**/details_harness|winogrande|5_2023-10-21T23-45-21.646720.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-21T23-45-21.646720.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T16_22_02.219224", "path": ["results_2023-07-19T16:22:02.219224.parquet"]}, {"split": "2023_10_21T23_45_21.646720", "path": ["results_2023-10-21T23-45-21.646720.parquet"]}, {"split": "latest", "path": ["results_2023-10-21T23-45-21.646720.parquet"]}]}]}
2023-10-21T22:45:34+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of lmsys/vicuna-7b-v1.3 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model lmsys/vicuna-7b-v1.3 on the Open LLM Leaderboard. The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-21T23:45:21.646720(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of lmsys/vicuna-7b-v1.3", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model lmsys/vicuna-7b-v1.3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-21T23:45:21.646720(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of lmsys/vicuna-7b-v1.3", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model lmsys/vicuna-7b-v1.3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-21T23:45:21.646720(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 20, 31, 168, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of lmsys/vicuna-7b-v1.3## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model lmsys/vicuna-7b-v1.3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-21T23:45:21.646720(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
5fc833a834039572141efea78ad8b88dc9fd8d0b
# Dataset Card for Evaluation run of lmsys/vicuna-7b-delta-v1.1 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/lmsys/vicuna-7b-delta-v1.1 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [lmsys/vicuna-7b-delta-v1.1](https://huggingface.co/lmsys/vicuna-7b-delta-v1.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_lmsys__vicuna-7b-delta-v1.1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-12T14:40:56.820234](https://huggingface.co/datasets/open-llm-leaderboard/details_lmsys__vicuna-7b-delta-v1.1/blob/main/results_2023-10-12T14-40-56.820234.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.11388422818791946, "em_stderr": 0.00325324428862373, "f1": 0.16976719798657605, "f1_stderr": 0.003380156230610554, "acc": 0.38244753834582057, "acc_stderr": 0.009528517622122097 }, "harness|drop|3": { "em": 0.11388422818791946, "em_stderr": 0.00325324428862373, "f1": 0.16976719798657605, "f1_stderr": 0.003380156230610554 }, "harness|gsm8k|5": { "acc": 0.05534495830174375, "acc_stderr": 0.006298221796179588 }, "harness|winogrande|5": { "acc": 0.7095501183898973, "acc_stderr": 0.012758813448064607 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
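Beyond the per-task details, the aggregated metrics can be loaded in the same way from the "results" configuration mentioned above. The snippet below is a minimal sketch, assuming the "results" configuration and the "latest" split naming declared in this repository's metadata:

```python
from datasets import load_dataset

# Load the aggregated metrics for this evaluation run.
# "latest" resolves to the most recent timestamped run declared in the repo metadata.
results = load_dataset(
    "open-llm-leaderboard/details_lmsys__vicuna-7b-delta-v1.1",
    "results",
    split="latest",
)

# Each row stores the serialized results of one evaluation run; inspect the
# columns to see which tasks and metrics were recorded.
print(results.column_names)
print(results[0])
```

Loading the named "results" configuration avoids downloading the much larger per-example detail files when only the headline numbers are needed.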
open-llm-leaderboard/details_lmsys__vicuna-7b-delta-v1.1
[ "region:us" ]
2023-08-18T10:07:13+00:00
{"pretty_name": "Evaluation run of lmsys/vicuna-7b-delta-v1.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [lmsys/vicuna-7b-delta-v1.1](https://huggingface.co/lmsys/vicuna-7b-delta-v1.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_lmsys__vicuna-7b-delta-v1.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-12T14:40:56.820234](https://huggingface.co/datasets/open-llm-leaderboard/details_lmsys__vicuna-7b-delta-v1.1/blob/main/results_2023-10-12T14-40-56.820234.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.11388422818791946,\n \"em_stderr\": 0.00325324428862373,\n \"f1\": 0.16976719798657605,\n \"f1_stderr\": 0.003380156230610554,\n \"acc\": 0.38244753834582057,\n \"acc_stderr\": 0.009528517622122097\n },\n \"harness|drop|3\": {\n \"em\": 0.11388422818791946,\n \"em_stderr\": 0.00325324428862373,\n \"f1\": 0.16976719798657605,\n \"f1_stderr\": 0.003380156230610554\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.05534495830174375,\n \"acc_stderr\": 0.006298221796179588\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7095501183898973,\n \"acc_stderr\": 0.012758813448064607\n }\n}\n```", "repo_url": "https://huggingface.co/lmsys/vicuna-7b-delta-v1.1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|arc:challenge|25_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|arc:challenge|25_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_12T14_40_56.820234", "path": ["**/details_harness|drop|3_2023-10-12T14-40-56.820234.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-12T14-40-56.820234.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_12T14_40_56.820234", "path": ["**/details_harness|gsm8k|5_2023-10-12T14-40-56.820234.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-12T14-40-56.820234.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hellaswag|10_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", 
"path": ["**/details_harness|hellaswag|10_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-18T12:22:17.969682.parquet", 
"**/details_harness|hendrycksTest-jurisprudence|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-18T12:22:17.969682.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-03T12:35:58.134991.parquet", 
"**/details_harness|hendrycksTest-global_facts|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-03T12:35:58.134991.parquet", 
"**/details_harness|hendrycksTest-virology|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-03T12:35:58.134991.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-03T12:35:58.134991.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": 
["**/details_harness|hendrycksTest-business_ethics|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": 
"2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": 
["**/details_harness|hendrycksTest-marketing|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": 
"2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": 
"2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-03T12:35:58.134991.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-03T12:35:58.134991.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_12T14_40_56.820234", "path": ["**/details_harness|winogrande|5_2023-10-12T14-40-56.820234.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-12T14-40-56.820234.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_18T12_22_17.969682", "path": ["results_2023-07-18T12:22:17.969682.parquet"]}, {"split": "2023_08_03T12_35_58.134991", "path": ["results_2023-08-03T12:35:58.134991.parquet"]}, {"split": "2023_10_12T14_40_56.820234", "path": ["results_2023-10-12T14-40-56.820234.parquet"]}, {"split": "latest", "path": ["results_2023-10-12T14-40-56.820234.parquet"]}]}]}
2023-10-12T13:41:09+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of lmsys/vicuna-7b-delta-v1.1 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model lmsys/vicuna-7b-delta-v1.1 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-12T14:40:56.820234 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
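The load example referenced in the summary above is stripped out of this processed text. As an illustrative sketch only — the repository id is inferred from the "details_<org>__<model>" naming pattern used by the other entries in this dump, and the config and split names are taken from the configuration metadata above, none of it re-verified against the Hub — the call would look roughly like this:

```python
from datasets import load_dataset

# Illustrative sketch: the repository id below is an assumption based on the
# naming pattern used elsewhere in this dump; the config name
# "harness_winogrande_5" and the "latest" split come from the metadata above.
data = load_dataset(
    "open-llm-leaderboard/details_lmsys__vicuna-7b-delta-v1.1",
    "harness_winogrande_5",
    split="latest",
)
print(data)  # per-example winogrande details for the most recent run
```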
[ "# Dataset Card for Evaluation run of lmsys/vicuna-7b-delta-v1.1", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model lmsys/vicuna-7b-delta-v1.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-12T14:40:56.820234(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of lmsys/vicuna-7b-delta-v1.1", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model lmsys/vicuna-7b-delta-v1.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-12T14:40:56.820234(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 23, 31, 171, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of lmsys/vicuna-7b-delta-v1.1## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model lmsys/vicuna-7b-delta-v1.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-12T14:40:56.820234(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
1505a61aa56206dc93c03c57d19cceca514c74ce
# Dataset Card for Evaluation run of lmsys/vicuna-13b-v1.5

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/lmsys/vicuna-13b-v1.5
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [lmsys/vicuna-13b-v1.5](https://huggingface.co/lmsys/vicuna-13b-v1.5) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_lmsys__vicuna-13b-v1.5",
    "harness_winogrande_5",
    split="train")
```

## Latest results

These are the [latest results from run 2023-10-15T01:22:33.237446](https://huggingface.co/datasets/open-llm-leaderboard/details_lmsys__vicuna-13b-v1.5/blob/main/results_2023-10-15T01-22-33.237446.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.21403104026845637,
        "em_stderr": 0.004200304057589016,
        "f1": 0.2773447986577177,
        "f1_stderr": 0.004194161726605588,
        "acc": 0.4298049932592257,
        "acc_stderr": 0.010471546731533343
    },
    "harness|drop|3": {
        "em": 0.21403104026845637,
        "em_stderr": 0.004200304057589016,
        "f1": 0.2773447986577177,
        "f1_stderr": 0.004194161726605588
    },
    "harness|gsm8k|5": {
        "acc": 0.11296436694465505,
        "acc_stderr": 0.008719339028833057
    },
    "harness|winogrande|5": {
        "acc": 0.7466456195737964,
        "acc_stderr": 0.01222375443423363
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
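As a follow-up to the load example above, here is a minimal sketch of pulling the aggregated metrics instead of the per-task details. The repository id, the "results" config, and the "latest" split are taken from this entry's card and metadata; the exact layout of the returned rows is an assumption and is not verified here.

```python
from datasets import load_dataset

# Minimal sketch: load the aggregated "results" config of this evaluation
# repository and look at the most recent run. Config and split names come
# from this entry's metadata; the row layout is an assumption.
results = load_dataset(
    "open-llm-leaderboard/details_lmsys__vicuna-13b-v1.5",
    "results",
    split="latest",
)
print(results[0])  # aggregated metrics row for the latest evaluation run
```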
open-llm-leaderboard/details_lmsys__vicuna-13b-v1.5
[ "region:us" ]
2023-08-18T10:07:30+00:00
{"pretty_name": "Evaluation run of lmsys/vicuna-13b-v1.5", "dataset_summary": "Dataset automatically created during the evaluation run of model [lmsys/vicuna-13b-v1.5](https://huggingface.co/lmsys/vicuna-13b-v1.5) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_lmsys__vicuna-13b-v1.5\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-15T01:22:33.237446](https://huggingface.co/datasets/open-llm-leaderboard/details_lmsys__vicuna-13b-v1.5/blob/main/results_2023-10-15T01-22-33.237446.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.21403104026845637,\n \"em_stderr\": 0.004200304057589016,\n \"f1\": 0.2773447986577177,\n \"f1_stderr\": 0.004194161726605588,\n \"acc\": 0.4298049932592257,\n \"acc_stderr\": 0.010471546731533343\n },\n \"harness|drop|3\": {\n \"em\": 0.21403104026845637,\n \"em_stderr\": 0.004200304057589016,\n \"f1\": 0.2773447986577177,\n \"f1_stderr\": 0.004194161726605588\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.11296436694465505,\n \"acc_stderr\": 0.008719339028833057\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7466456195737964,\n \"acc_stderr\": 0.01222375443423363\n }\n}\n```", "repo_url": "https://huggingface.co/lmsys/vicuna-13b-v1.5", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|arc:challenge|25_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_15T01_22_33.237446", "path": ["**/details_harness|drop|3_2023-10-15T01-22-33.237446.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-15T01-22-33.237446.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_15T01_22_33.237446", "path": ["**/details_harness|gsm8k|5_2023-10-15T01-22-33.237446.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-15T01-22-33.237446.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hellaswag|10_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": 
"2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T10:24:27.985087.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T10:24:27.985087.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-09T10:24:27.985087.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-09T10:24:27.985087.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-09T10:24:27.985087.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_15T01_22_33.237446", "path": ["**/details_harness|winogrande|5_2023-10-15T01-22-33.237446.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-15T01-22-33.237446.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_09T10_24_27.985087", "path": ["results_2023-08-09T10:24:27.985087.parquet"]}, {"split": "2023_10_15T01_22_33.237446", "path": ["results_2023-10-15T01-22-33.237446.parquet"]}, {"split": "latest", "path": ["results_2023-10-15T01-22-33.237446.parquet"]}]}]}
2023-10-15T00:22:46+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of lmsys/vicuna-13b-v1.5 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model lmsys/vicuna-13b-v1.5 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-15T01:22:33.237446 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of lmsys/vicuna-13b-v1.5", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model lmsys/vicuna-13b-v1.5 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T01:22:33.237446(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of lmsys/vicuna-13b-v1.5", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model lmsys/vicuna-13b-v1.5 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T01:22:33.237446(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 20, 31, 168, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of lmsys/vicuna-13b-v1.5## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model lmsys/vicuna-13b-v1.5 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-15T01:22:33.237446(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
e6c3c485229c8a3a5fe3353c43342ee4e078fccd
# Dataset Card for Evaluation run of lmsys/vicuna-13b-v1.5-16k

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/lmsys/vicuna-13b-v1.5-16k
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [lmsys/vicuna-13b-v1.5-16k](https://huggingface.co/lmsys/vicuna-13b-v1.5-16k) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_lmsys__vicuna-13b-v1.5-16k",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-15T19:40:01.175892](https://huggingface.co/datasets/open-llm-leaderboard/details_lmsys__vicuna-13b-v1.5-16k/blob/main/results_2023-10-15T19-40-01.175892.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.10056627516778524,
        "em_stderr": 0.003079997879762969,
        "f1": 0.1662111996644288,
        "f1_stderr": 0.003270306636878169,
        "acc": 0.4274584378756718,
        "acc_stderr": 0.010932657125643012
    },
    "harness|drop|3": {
        "em": 0.10056627516778524,
        "em_stderr": 0.003079997879762969,
        "f1": 0.1662111996644288,
        "f1_stderr": 0.003270306636878169
    },
    "harness|gsm8k|5": {
        "acc": 0.13115996967399546,
        "acc_stderr": 0.009298499235587863
    },
    "harness|winogrande|5": {
        "acc": 0.7237569060773481,
        "acc_stderr": 0.012566815015698158
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
open-llm-leaderboard/details_lmsys__vicuna-13b-v1.5-16k
[ "region:us" ]
2023-08-18T10:07:39+00:00
{"pretty_name": "Evaluation run of lmsys/vicuna-13b-v1.5-16k", "dataset_summary": "Dataset automatically created during the evaluation run of model [lmsys/vicuna-13b-v1.5-16k](https://huggingface.co/lmsys/vicuna-13b-v1.5-16k) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_lmsys__vicuna-13b-v1.5-16k\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-15T19:40:01.175892](https://huggingface.co/datasets/open-llm-leaderboard/details_lmsys__vicuna-13b-v1.5-16k/blob/main/results_2023-10-15T19-40-01.175892.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.10056627516778524,\n \"em_stderr\": 0.003079997879762969,\n \"f1\": 0.1662111996644288,\n \"f1_stderr\": 0.003270306636878169,\n \"acc\": 0.4274584378756718,\n \"acc_stderr\": 0.010932657125643012\n },\n \"harness|drop|3\": {\n \"em\": 0.10056627516778524,\n \"em_stderr\": 0.003079997879762969,\n \"f1\": 0.1662111996644288,\n \"f1_stderr\": 0.003270306636878169\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.13115996967399546,\n \"acc_stderr\": 0.009298499235587863\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7237569060773481,\n \"acc_stderr\": 0.012566815015698158\n }\n}\n```", "repo_url": "https://huggingface.co/lmsys/vicuna-13b-v1.5-16k", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|arc:challenge|25_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_17T04_03_07.981708", "path": ["**/details_harness|drop|3_2023-09-17T04-03-07.981708.parquet"]}, {"split": "2023_10_15T19_40_01.175892", "path": ["**/details_harness|drop|3_2023-10-15T19-40-01.175892.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-15T19-40-01.175892.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_17T04_03_07.981708", "path": ["**/details_harness|gsm8k|5_2023-09-17T04-03-07.981708.parquet"]}, {"split": "2023_10_15T19_40_01.175892", "path": ["**/details_harness|gsm8k|5_2023-10-15T19-40-01.175892.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-15T19-40-01.175892.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": 
["**/details_harness|hellaswag|10_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-09T10:54:51.508429.parquet", 
"**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T10:54:51.508429.parquet", 
"**/details_harness|hendrycksTest-global_facts|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T10:54:51.508429.parquet", 
"**/details_harness|hendrycksTest-virology|5_2023-08-09T10:54:51.508429.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T10:54:51.508429.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-09T10:54:51.508429.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-09T10:54:51.508429.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_17T04_03_07.981708", "path": ["**/details_harness|winogrande|5_2023-09-17T04-03-07.981708.parquet"]}, {"split": "2023_10_15T19_40_01.175892", "path": ["**/details_harness|winogrande|5_2023-10-15T19-40-01.175892.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-15T19-40-01.175892.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_09T10_54_51.508429", "path": ["results_2023-08-09T10:54:51.508429.parquet"]}, {"split": "2023_09_17T04_03_07.981708", "path": ["results_2023-09-17T04-03-07.981708.parquet"]}, {"split": "2023_10_15T19_40_01.175892", "path": ["results_2023-10-15T19-40-01.175892.parquet"]}, {"split": "latest", "path": ["results_2023-10-15T19-40-01.175892.parquet"]}]}]}
2023-10-15T18:40:13+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of lmsys/vicuna-13b-v1.5-16k ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model lmsys/vicuna-13b-v1.5-16k on the Open LLM Leaderboard. The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-15T19:40:01.175892(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of lmsys/vicuna-13b-v1.5-16k", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model lmsys/vicuna-13b-v1.5-16k on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T19:40:01.175892(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of lmsys/vicuna-13b-v1.5-16k", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model lmsys/vicuna-13b-v1.5-16k on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T19:40:01.175892(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 22, 31, 170, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of lmsys/vicuna-13b-v1.5-16k## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model lmsys/vicuna-13b-v1.5-16k on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-15T19:40:01.175892(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
dc88a320c9a5981812b204910aaf20e99ece677e
# Dataset Card for Evaluation run of lmsys/vicuna-7b-v1.5

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/lmsys/vicuna-7b-v1.5
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [lmsys/vicuna-7b-v1.5](https://huggingface.co/lmsys/vicuna-7b-v1.5) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_lmsys__vicuna-7b-v1.5",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-21T21:05:37.153515](https://huggingface.co/datasets/open-llm-leaderboard/details_lmsys__vicuna-7b-v1.5/blob/main/results_2023-10-21T21-05-37.153515.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.017932046979865772,
        "em_stderr": 0.0013590184569504276,
        "f1": 0.08961094798657747,
        "f1_stderr": 0.002014243406072028,
        "acc": 0.4016346602057357,
        "acc_stderr": 0.010076117588605417
    },
    "harness|drop|3": {
        "em": 0.017932046979865772,
        "em_stderr": 0.0013590184569504276,
        "f1": 0.08961094798657747,
        "f1_stderr": 0.002014243406072028
    },
    "harness|gsm8k|5": {
        "acc": 0.08188021228203184,
        "acc_stderr": 0.007552338527716956
    },
    "harness|winogrande|5": {
        "acc": 0.7213891081294396,
        "acc_stderr": 0.012599896649493878
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
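The DROP and GSM8K aggregates quoted in the "Latest results" block above come from per-sample detail tables in the same repository; a minimal sketch of loading them is shown below, using the configuration names and the "latest" split declared for this repository in the metadata that follows.

```python
from datasets import load_dataset

repo = "open-llm-leaderboard/details_lmsys__vicuna-7b-v1.5"

# Per-sample rows behind the aggregated DROP (3-shot) and GSM8K (5-shot)
# numbers shown in the "Latest results" section above.
drop_details = load_dataset(repo, "harness_drop_3", split="latest")
gsm8k_details = load_dataset(repo, "harness_gsm8k_5", split="latest")

print(drop_details)
print(gsm8k_details)
```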
open-llm-leaderboard/details_lmsys__vicuna-7b-v1.5
[ "region:us" ]
2023-08-18T10:07:47+00:00
{"pretty_name": "Evaluation run of lmsys/vicuna-7b-v1.5", "dataset_summary": "Dataset automatically created during the evaluation run of model [lmsys/vicuna-7b-v1.5](https://huggingface.co/lmsys/vicuna-7b-v1.5) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_lmsys__vicuna-7b-v1.5\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-21T21:05:37.153515](https://huggingface.co/datasets/open-llm-leaderboard/details_lmsys__vicuna-7b-v1.5/blob/main/results_2023-10-21T21-05-37.153515.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.017932046979865772,\n \"em_stderr\": 0.0013590184569504276,\n \"f1\": 0.08961094798657747,\n \"f1_stderr\": 0.002014243406072028,\n \"acc\": 0.4016346602057357,\n \"acc_stderr\": 0.010076117588605417\n },\n \"harness|drop|3\": {\n \"em\": 0.017932046979865772,\n \"em_stderr\": 0.0013590184569504276,\n \"f1\": 0.08961094798657747,\n \"f1_stderr\": 0.002014243406072028\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.08188021228203184,\n \"acc_stderr\": 0.007552338527716956\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7213891081294396,\n \"acc_stderr\": 0.012599896649493878\n }\n}\n```", "repo_url": "https://huggingface.co/lmsys/vicuna-7b-v1.5", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|arc:challenge|25_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_19T12_56_49.814418", "path": ["**/details_harness|drop|3_2023-10-19T12-56-49.814418.parquet"]}, {"split": "2023_10_21T21_05_37.153515", "path": ["**/details_harness|drop|3_2023-10-21T21-05-37.153515.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-21T21-05-37.153515.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_19T12_56_49.814418", "path": ["**/details_harness|gsm8k|5_2023-10-19T12-56-49.814418.parquet"]}, {"split": "2023_10_21T21_05_37.153515", "path": ["**/details_harness|gsm8k|5_2023-10-21T21-05-37.153515.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-21T21-05-37.153515.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": 
["**/details_harness|hellaswag|10_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T12:09:52.202468.parquet", 
"**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T12:09:52.202468.parquet", 
"**/details_harness|hendrycksTest-global_facts|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T12:09:52.202468.parquet", 
"**/details_harness|hendrycksTest-virology|5_2023-08-17T12:09:52.202468.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T12:09:52.202468.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T12:09:52.202468.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T12:09:52.202468.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_19T12_56_49.814418", "path": ["**/details_harness|winogrande|5_2023-10-19T12-56-49.814418.parquet"]}, {"split": "2023_10_21T21_05_37.153515", "path": ["**/details_harness|winogrande|5_2023-10-21T21-05-37.153515.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-21T21-05-37.153515.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_17T12_09_52.202468", "path": ["results_2023-08-17T12:09:52.202468.parquet"]}, {"split": "2023_10_19T12_56_49.814418", "path": ["results_2023-10-19T12-56-49.814418.parquet"]}, {"split": "2023_10_21T21_05_37.153515", "path": ["results_2023-10-21T21-05-37.153515.parquet"]}, {"split": "latest", "path": ["results_2023-10-21T21-05-37.153515.parquet"]}]}]}
2023-10-21T20:05:49+00:00
[]
[]
TAGS
#region-us
# Dataset Card for Evaluation run of lmsys/vicuna-7b-v1.5

## Dataset Description

- Homepage: 
- Repository: URL
- Paper: 
- Leaderboard: URL
- Point of Contact: clementine@URL

### Dataset Summary

Dataset automatically created during the evaluation run of model lmsys/vicuna-7b-v1.5 on the Open LLM Leaderboard.

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following:

## Latest results

These are the latest results from run 2023-10-21T21:05:37.153515 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval):

### Supported Tasks and Leaderboards

### Languages

## Dataset Structure

### Data Instances

### Data Fields

### Data Splits

## Dataset Creation

### Curation Rationale

### Source Data

#### Initial Data Collection and Normalization

#### Who are the source language producers?

### Annotations

#### Annotation process

#### Who are the annotators?

### Personal and Sensitive Information

## Considerations for Using the Data

### Social Impact of Dataset

### Discussion of Biases

### Other Known Limitations

## Additional Information

### Dataset Curators

### Licensing Information

### Contributions
[ "# Dataset Card for Evaluation run of lmsys/vicuna-7b-v1.5", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model lmsys/vicuna-7b-v1.5 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-21T21:05:37.153515(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of lmsys/vicuna-7b-v1.5", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model lmsys/vicuna-7b-v1.5 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-21T21:05:37.153515(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 20, 31, 168, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of lmsys/vicuna-7b-v1.5## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model lmsys/vicuna-7b-v1.5 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-21T21:05:37.153515(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
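The stripped rendering of this record's card keeps the sentence "To load the details from a run, you can for instance do the following:" but omits the snippet itself, so here is a minimal sketch of how the evaluation details of this record could be read back with the `datasets` library. This sketch is not part of the original card: the repository id, the `harness_winogrande_5` and `results` config names, and the `latest` split are taken from the config list in this record's metadata, and any other config listed there would follow the same pattern.

```python
from datasets import load_dataset

# Per-sample details for one evaluated task (one config); the "latest" split
# always points at the most recent run listed in the record's metadata.
details = load_dataset(
    "open-llm-leaderboard/details_lmsys__vicuna-7b-v1.5",
    "harness_winogrande_5",
    split="latest",
)

# The aggregated metrics of every run are stored in the "results" config.
results = load_dataset(
    "open-llm-leaderboard/details_lmsys__vicuna-7b-v1.5",
    "results",
    split="latest",
)

print(details)       # per-example predictions and scores
print(results[0])    # aggregated metrics of the latest run
```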
86b834e44e6e6d2e65cca24b7aab4a3755dcd77d
# Dataset Card for Evaluation run of lmsys/vicuna-13b-delta-v1.1

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/lmsys/vicuna-13b-delta-v1.1
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [lmsys/vicuna-13b-delta-v1.1](https://huggingface.co/lmsys/vicuna-13b-delta-v1.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_lmsys__vicuna-13b-delta-v1.1",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-15T01:08:28.520609](https://huggingface.co/datasets/open-llm-leaderboard/details_lmsys__vicuna-13b-delta-v1.1/blob/main/results_2023-10-15T01-08-28.520609.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.029677013422818792,
        "em_stderr": 0.0017378324714143493,
        "f1": 0.09310612416107406,
        "f1_stderr": 0.002167792401176146,
        "acc": 0.4141695683211732,
        "acc_stderr": 0.010019161585538096
    },
    "harness|drop|3": {
        "em": 0.029677013422818792,
        "em_stderr": 0.0017378324714143493,
        "f1": 0.09310612416107406,
        "f1_stderr": 0.002167792401176146
    },
    "harness|gsm8k|5": {
        "acc": 0.08642911296436695,
        "acc_stderr": 0.00774004433710381
    },
    "harness|winogrande|5": {
        "acc": 0.7419100236779794,
        "acc_stderr": 0.012298278833972384
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
open-llm-leaderboard/details_lmsys__vicuna-13b-delta-v1.1
[ "region:us" ]
2023-08-18T10:07:56+00:00
{"pretty_name": "Evaluation run of lmsys/vicuna-13b-delta-v1.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [lmsys/vicuna-13b-delta-v1.1](https://huggingface.co/lmsys/vicuna-13b-delta-v1.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_lmsys__vicuna-13b-delta-v1.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-15T01:08:28.520609](https://huggingface.co/datasets/open-llm-leaderboard/details_lmsys__vicuna-13b-delta-v1.1/blob/main/results_2023-10-15T01-08-28.520609.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.029677013422818792,\n \"em_stderr\": 0.0017378324714143493,\n \"f1\": 0.09310612416107406,\n \"f1_stderr\": 0.002167792401176146,\n \"acc\": 0.4141695683211732,\n \"acc_stderr\": 0.010019161585538096\n },\n \"harness|drop|3\": {\n \"em\": 0.029677013422818792,\n \"em_stderr\": 0.0017378324714143493,\n \"f1\": 0.09310612416107406,\n \"f1_stderr\": 0.002167792401176146\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.08642911296436695,\n \"acc_stderr\": 0.00774004433710381\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7419100236779794,\n \"acc_stderr\": 0.012298278833972384\n }\n}\n```", "repo_url": "https://huggingface.co/lmsys/vicuna-13b-delta-v1.1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|arc:challenge|25_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_15T01_08_28.520609", "path": ["**/details_harness|drop|3_2023-10-15T01-08-28.520609.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-15T01-08-28.520609.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_15T01_08_28.520609", "path": ["**/details_harness|gsm8k|5_2023-10-15T01-08-28.520609.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-15T01-08-28.520609.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hellaswag|10_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", 
"data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T16:35:51.471732.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T16:35:51.471732.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-09T16:35:51.471732.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-09T16:35:51.471732.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-09T16:35:51.471732.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_15T01_08_28.520609", "path": ["**/details_harness|winogrande|5_2023-10-15T01-08-28.520609.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-15T01-08-28.520609.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_09T16_35_51.471732", "path": ["results_2023-08-09T16:35:51.471732.parquet"]}, {"split": "2023_10_15T01_08_28.520609", "path": ["results_2023-10-15T01-08-28.520609.parquet"]}, {"split": "latest", "path": ["results_2023-10-15T01-08-28.520609.parquet"]}]}]}
2023-10-15T00:08:41+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of lmsys/vicuna-13b-delta-v1.1 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model lmsys/vicuna-13b-delta-v1.1 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-15T01:08:28.520609 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of lmsys/vicuna-13b-delta-v1.1", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model lmsys/vicuna-13b-delta-v1.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T01:08:28.520609(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of lmsys/vicuna-13b-delta-v1.1", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model lmsys/vicuna-13b-delta-v1.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T01:08:28.520609(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 23, 31, 171, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of lmsys/vicuna-13b-delta-v1.1## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model lmsys/vicuna-13b-delta-v1.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-15T01:08:28.520609(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
d87d4b5470ede8a7036f432ee162ad60a39c5dae
# Dataset Card for Evaluation run of lmsys/vicuna-13b-v1.1 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/lmsys/vicuna-13b-v1.1 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [lmsys/vicuna-13b-v1.1](https://huggingface.co/lmsys/vicuna-13b-v1.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_lmsys__vicuna-13b-v1.1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-16T09:09:49.643618](https://huggingface.co/datasets/open-llm-leaderboard/details_lmsys__vicuna-13b-v1.1/blob/main/results_2023-10-16T09-09-49.643618.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.029677013422818792, "em_stderr": 0.0017378324714143493, "f1": 0.09310612416107406, "f1_stderr": 0.002167792401176146, "acc": 0.4141695683211732, "acc_stderr": 0.010019161585538096 }, "harness|drop|3": { "em": 0.029677013422818792, "em_stderr": 0.0017378324714143493, "f1": 0.09310612416107406, "f1_stderr": 0.002167792401176146 }, "harness|gsm8k|5": { "acc": 0.08642911296436695, "acc_stderr": 0.00774004433710381 }, "harness|winogrande|5": { "acc": 0.7419100236779794, "acc_stderr": 0.012298278833972384 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_lmsys__vicuna-13b-v1.1
[ "region:us" ]
2023-08-18T10:08:05+00:00
{"pretty_name": "Evaluation run of lmsys/vicuna-13b-v1.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [lmsys/vicuna-13b-v1.1](https://huggingface.co/lmsys/vicuna-13b-v1.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_lmsys__vicuna-13b-v1.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-16T09:09:49.643618](https://huggingface.co/datasets/open-llm-leaderboard/details_lmsys__vicuna-13b-v1.1/blob/main/results_2023-10-16T09-09-49.643618.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.029677013422818792,\n \"em_stderr\": 0.0017378324714143493,\n \"f1\": 0.09310612416107406,\n \"f1_stderr\": 0.002167792401176146,\n \"acc\": 0.4141695683211732,\n \"acc_stderr\": 0.010019161585538096\n },\n \"harness|drop|3\": {\n \"em\": 0.029677013422818792,\n \"em_stderr\": 0.0017378324714143493,\n \"f1\": 0.09310612416107406,\n \"f1_stderr\": 0.002167792401176146\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.08642911296436695,\n \"acc_stderr\": 0.00774004433710381\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7419100236779794,\n \"acc_stderr\": 0.012298278833972384\n }\n}\n```", "repo_url": "https://huggingface.co/lmsys/vicuna-13b-v1.1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|arc:challenge|25_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_16T09_09_49.643618", "path": ["**/details_harness|drop|3_2023-10-16T09-09-49.643618.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-16T09-09-49.643618.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_16T09_09_49.643618", "path": ["**/details_harness|gsm8k|5_2023-10-16T09-09-49.643618.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-16T09-09-49.643618.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hellaswag|10_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": 
"2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T14:11:02.419209.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T14:11:02.419209.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-24T14:11:02.419209.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-24T14:11:02.419209.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-24T14:11:02.419209.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_16T09_09_49.643618", "path": ["**/details_harness|winogrande|5_2023-10-16T09-09-49.643618.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-16T09-09-49.643618.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_24T14_11_02.419209", "path": ["results_2023-07-24T14:11:02.419209.parquet"]}, {"split": "2023_10_16T09_09_49.643618", "path": ["results_2023-10-16T09-09-49.643618.parquet"]}, {"split": "latest", "path": ["results_2023-10-16T09-09-49.643618.parquet"]}]}]}
2023-10-16T08:10:40+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of lmsys/vicuna-13b-v1.1 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model lmsys/vicuna-13b-v1.1 on the Open LLM Leaderboard. The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-16T09:09:49.643618(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of lmsys/vicuna-13b-v1.1", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model lmsys/vicuna-13b-v1.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-16T09:09:49.643618(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of lmsys/vicuna-13b-v1.1", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model lmsys/vicuna-13b-v1.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-16T09:09:49.643618(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 20, 31, 168, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of lmsys/vicuna-13b-v1.1## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model lmsys/vicuna-13b-v1.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-16T09:09:49.643618(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
b0c83fa6dcebb490d84e3e172231dbefed6ae9f4
# Dataset Card for Evaluation run of chansung/gpt4-alpaca-lora-13b-decapoda-1024 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/chansung/gpt4-alpaca-lora-13b-decapoda-1024 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [chansung/gpt4-alpaca-lora-13b-decapoda-1024](https://huggingface.co/chansung/gpt4-alpaca-lora-13b-decapoda-1024) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_chansung__gpt4-alpaca-lora-13b-decapoda-1024", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-09-17T14:48:01.782551](https://huggingface.co/datasets/open-llm-leaderboard/details_chansung__gpt4-alpaca-lora-13b-decapoda-1024/blob/main/results_2023-09-17T14-48-01.782551.json)(note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.006082214765100671, "em_stderr": 0.0007962432393029008, "f1": 0.06735423657718136, "f1_stderr": 0.0015529687729517118, "acc": 0.42730136257586737, "acc_stderr": 0.009642272426310498 }, "harness|drop|3": { "em": 0.006082214765100671, "em_stderr": 0.0007962432393029008, "f1": 0.06735423657718136, "f1_stderr": 0.0015529687729517118 }, "harness|gsm8k|5": { "acc": 0.08112206216830932, "acc_stderr": 0.007520395797922653 }, "harness|winogrande|5": { "acc": 0.7734806629834254, "acc_stderr": 0.011764149054698341 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_chansung__gpt4-alpaca-lora-13b-decapoda-1024
[ "region:us" ]
2023-08-18T10:08:14+00:00
{"pretty_name": "Evaluation run of chansung/gpt4-alpaca-lora-13b-decapoda-1024", "dataset_summary": "Dataset automatically created during the evaluation run of model [chansung/gpt4-alpaca-lora-13b-decapoda-1024](https://huggingface.co/chansung/gpt4-alpaca-lora-13b-decapoda-1024) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_chansung__gpt4-alpaca-lora-13b-decapoda-1024\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-09-17T14:48:01.782551](https://huggingface.co/datasets/open-llm-leaderboard/details_chansung__gpt4-alpaca-lora-13b-decapoda-1024/blob/main/results_2023-09-17T14-48-01.782551.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.006082214765100671,\n \"em_stderr\": 0.0007962432393029008,\n \"f1\": 0.06735423657718136,\n \"f1_stderr\": 0.0015529687729517118,\n \"acc\": 0.42730136257586737,\n \"acc_stderr\": 0.009642272426310498\n },\n \"harness|drop|3\": {\n \"em\": 0.006082214765100671,\n \"em_stderr\": 0.0007962432393029008,\n \"f1\": 0.06735423657718136,\n \"f1_stderr\": 0.0015529687729517118\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.08112206216830932,\n \"acc_stderr\": 0.007520395797922653\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7734806629834254,\n \"acc_stderr\": 0.011764149054698341\n }\n}\n```", "repo_url": "https://huggingface.co/chansung/gpt4-alpaca-lora-13b-decapoda-1024", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|arc:challenge|25_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_17T14_48_01.782551", "path": ["**/details_harness|drop|3_2023-09-17T14-48-01.782551.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-17T14-48-01.782551.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_17T14_48_01.782551", "path": ["**/details_harness|gsm8k|5_2023-09-17T14-48-01.782551.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-09-17T14-48-01.782551.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hellaswag|10_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hellaswag|10_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T17:49:33.952223.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-09T17:49:33.952223.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T17:49:33.952223.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-09T17:49:33.952223.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": 
"2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-09T17:49:33.952223.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-09T17:49:33.952223.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_17T14_48_01.782551", "path": ["**/details_harness|winogrande|5_2023-09-17T14-48-01.782551.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-17T14-48-01.782551.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_09T17_49_33.952223", "path": ["results_2023-08-09T17:49:33.952223.parquet"]}, {"split": "2023_09_17T14_48_01.782551", "path": ["results_2023-09-17T14-48-01.782551.parquet"]}, {"split": "latest", "path": ["results_2023-09-17T14-48-01.782551.parquet"]}]}]}
2023-09-17T13:48:13+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of chansung/gpt4-alpaca-lora-13b-decapoda-1024 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model chansung/gpt4-alpaca-lora-13b-decapoda-1024 on the Open LLM Leaderboard. The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-09-17T14:48:01.782551(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of chansung/gpt4-alpaca-lora-13b-decapoda-1024", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model chansung/gpt4-alpaca-lora-13b-decapoda-1024 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-17T14:48:01.782551(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of chansung/gpt4-alpaca-lora-13b-decapoda-1024", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model chansung/gpt4-alpaca-lora-13b-decapoda-1024 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-17T14:48:01.782551(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 28, 31, 176, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of chansung/gpt4-alpaca-lora-13b-decapoda-1024## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model chansung/gpt4-alpaca-lora-13b-decapoda-1024 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-09-17T14:48:01.782551(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
acb7dd9488203f29e571eb39335211a20a447b53
# Dataset Card for Evaluation run of breadlicker45/dough-instruct-base-001 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/breadlicker45/dough-instruct-base-001 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [breadlicker45/dough-instruct-base-001](https://huggingface.co/breadlicker45/dough-instruct-base-001) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_breadlicker45__dough-instruct-base-001", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-09-16T20:08:34.854052](https://huggingface.co/datasets/open-llm-leaderboard/details_breadlicker45__dough-instruct-base-001/blob/main/results_2023-09-16T20-08-34.854052.json)(note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0, "em_stderr": 0.0, "f1": 0.0029163171140939564, "f1_stderr": 0.00019355490209304062, "acc": 0.255327545382794, "acc_stderr": 0.007024647268145198 }, "harness|drop|3": { "em": 0.0, "em_stderr": 0.0, "f1": 0.0029163171140939564, "f1_stderr": 0.00019355490209304062 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 }, "harness|winogrande|5": { "acc": 0.510655090765588, "acc_stderr": 0.014049294536290396 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_breadlicker45__dough-instruct-base-001
[ "region:us" ]
2023-08-18T10:08:23+00:00
{"pretty_name": "Evaluation run of breadlicker45/dough-instruct-base-001", "dataset_summary": "Dataset automatically created during the evaluation run of model [breadlicker45/dough-instruct-base-001](https://huggingface.co/breadlicker45/dough-instruct-base-001) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_breadlicker45__dough-instruct-base-001\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-09-16T20:08:34.854052](https://huggingface.co/datasets/open-llm-leaderboard/details_breadlicker45__dough-instruct-base-001/blob/main/results_2023-09-16T20-08-34.854052.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0,\n \"em_stderr\": 0.0,\n \"f1\": 0.0029163171140939564,\n \"f1_stderr\": 0.00019355490209304062,\n \"acc\": 0.255327545382794,\n \"acc_stderr\": 0.007024647268145198\n },\n \"harness|drop|3\": {\n \"em\": 0.0,\n \"em_stderr\": 0.0,\n \"f1\": 0.0029163171140939564,\n \"f1_stderr\": 0.00019355490209304062\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.510655090765588,\n \"acc_stderr\": 0.014049294536290396\n }\n}\n```", "repo_url": "https://huggingface.co/breadlicker45/dough-instruct-base-001", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|arc:challenge|25_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_16T20_08_34.854052", "path": ["**/details_harness|drop|3_2023-09-16T20-08-34.854052.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-16T20-08-34.854052.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_16T20_08_34.854052", "path": ["**/details_harness|gsm8k|5_2023-09-16T20-08-34.854052.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-09-16T20-08-34.854052.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hellaswag|10_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": 
"2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-02T14:53:33.597217.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-02T14:53:33.597217.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-02T14:53:33.597217.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-02T14:53:33.597217.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-02T14:53:33.597217.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_16T20_08_34.854052", "path": ["**/details_harness|winogrande|5_2023-09-16T20-08-34.854052.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-16T20-08-34.854052.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_02T14_53_33.597217", "path": ["results_2023-08-02T14:53:33.597217.parquet"]}, {"split": "2023_09_16T20_08_34.854052", "path": ["results_2023-09-16T20-08-34.854052.parquet"]}, {"split": "latest", "path": ["results_2023-09-16T20-08-34.854052.parquet"]}]}]}
2023-09-16T19:08:46+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of breadlicker45/dough-instruct-base-001 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model breadlicker45/dough-instruct-base-001 on the Open LLM Leaderboard. The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-09-16T20:08:34.854052(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of breadlicker45/dough-instruct-base-001", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model breadlicker45/dough-instruct-base-001 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-16T20:08:34.854052(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of breadlicker45/dough-instruct-base-001", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model breadlicker45/dough-instruct-base-001 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-16T20:08:34.854052(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 23, 31, 171, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of breadlicker45/dough-instruct-base-001## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model breadlicker45/dough-instruct-base-001 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-09-16T20:08:34.854052(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
e7701f68824880ed8095680c3ca0f1a73e2789b2
# Dataset Card for Evaluation run of georgesung/llama2_7b_chat_uncensored

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/georgesung/llama2_7b_chat_uncensored
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [georgesung/llama2_7b_chat_uncensored](https://huggingface.co/georgesung/llama2_7b_chat_uncensored) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_georgesung__llama2_7b_chat_uncensored",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-09-17T06:01:34.534802](https://huggingface.co/datasets/open-llm-leaderboard/details_georgesung__llama2_7b_chat_uncensored/blob/main/results_2023-09-17T06-01-34.534802.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.0018875838926174498,
        "em_stderr": 0.0004445109990558761,
        "f1": 0.05687290268456382,
        "f1_stderr": 0.0013311620250832507,
        "acc": 0.3997491582259886,
        "acc_stderr": 0.009384299684412923
    },
    "harness|drop|3": {
        "em": 0.0018875838926174498,
        "em_stderr": 0.0004445109990558761,
        "f1": 0.05687290268456382,
        "f1_stderr": 0.0013311620250832507
    },
    "harness|gsm8k|5": {
        "acc": 0.058377558756633814,
        "acc_stderr": 0.0064580835578324685
    },
    "harness|winogrande|5": {
        "acc": 0.7411207576953434,
        "acc_stderr": 0.012310515810993376
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
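As a quick sketch of how the aggregated metrics described in the Dataset Summary might be retrieved: the snippet below assumes the `results` configuration and its `latest` split listed in this repo's configs, and the exact row schema is not documented here.

```python
from datasets import load_dataset

# The "results" configuration stores the aggregated per-task metrics of each run;
# the "latest" split always points to the most recent evaluation of this model.
results = load_dataset(
    "open-llm-leaderboard/details_georgesung__llama2_7b_chat_uncensored",
    "results",
    split="latest",
)

# Inspect the first row of the aggregated results (typically one row per results file).
print(results[0])
```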
open-llm-leaderboard/details_georgesung__llama2_7b_chat_uncensored
[ "region:us" ]
2023-08-18T10:08:31+00:00
{"pretty_name": "Evaluation run of georgesung/llama2_7b_chat_uncensored", "dataset_summary": "Dataset automatically created during the evaluation run of model [georgesung/llama2_7b_chat_uncensored](https://huggingface.co/georgesung/llama2_7b_chat_uncensored) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_georgesung__llama2_7b_chat_uncensored\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-09-17T06:01:34.534802](https://huggingface.co/datasets/open-llm-leaderboard/details_georgesung__llama2_7b_chat_uncensored/blob/main/results_2023-09-17T06-01-34.534802.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0018875838926174498,\n \"em_stderr\": 0.0004445109990558761,\n \"f1\": 0.05687290268456382,\n \"f1_stderr\": 0.0013311620250832507,\n \"acc\": 0.3997491582259886,\n \"acc_stderr\": 0.009384299684412923\n },\n \"harness|drop|3\": {\n \"em\": 0.0018875838926174498,\n \"em_stderr\": 0.0004445109990558761,\n \"f1\": 0.05687290268456382,\n \"f1_stderr\": 0.0013311620250832507\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.058377558756633814,\n \"acc_stderr\": 0.0064580835578324685\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7411207576953434,\n \"acc_stderr\": 0.012310515810993376\n }\n}\n```", "repo_url": "https://huggingface.co/georgesung/llama2_7b_chat_uncensored", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|arc:challenge|25_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_17T06_01_34.534802", "path": ["**/details_harness|drop|3_2023-09-17T06-01-34.534802.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-17T06-01-34.534802.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_17T06_01_34.534802", "path": ["**/details_harness|gsm8k|5_2023-09-17T06-01-34.534802.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-09-17T06-01-34.534802.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hellaswag|10_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hellaswag|10_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T11:17:24.189192.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-24T11:17:24.189192.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T11:17:24.189192.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-24T11:17:24.189192.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": 
"2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-24T11:17:24.189192.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-24T11:17:24.189192.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_17T06_01_34.534802", "path": ["**/details_harness|winogrande|5_2023-09-17T06-01-34.534802.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-17T06-01-34.534802.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_24T11_17_24.189192", "path": ["results_2023-07-24T11:17:24.189192.parquet"]}, {"split": "2023_09_17T06_01_34.534802", "path": ["results_2023-09-17T06-01-34.534802.parquet"]}, {"split": "latest", "path": ["results_2023-09-17T06-01-34.534802.parquet"]}]}]}
2023-09-17T05:01:46+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of georgesung/llama2_7b_chat_uncensored ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model georgesung/llama2_7b_chat_uncensored on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the sketch below): ## Latest results These are the latest results from run 2023-09-17T06:01:34.534802 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
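The load snippet was stripped from this processed text; a minimal sketch is given below. The details repository id is assumed to follow the usual open-llm-leaderboard naming pattern (open-llm-leaderboard/details_georgesung__llama2_7b_chat_uncensored), and the "harness_winogrande_5" configuration name comes from this record's metadata; adjust both if they differ.

```python
from datasets import load_dataset

# Assumed repo id, inferred from the open-llm-leaderboard naming convention;
# not stated explicitly in this processed text.
REPO = "open-llm-leaderboard/details_georgesung__llama2_7b_chat_uncensored"

# "harness_winogrande_5" is one of the 64 per-task configurations;
# the "latest" split points at the most recent evaluation run.
data = load_dataset(REPO, "harness_winogrande_5", split="latest")
```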
[ "# Dataset Card for Evaluation run of georgesung/llama2_7b_chat_uncensored", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model georgesung/llama2_7b_chat_uncensored on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-17T06:01:34.534802(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of georgesung/llama2_7b_chat_uncensored", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model georgesung/llama2_7b_chat_uncensored on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-17T06:01:34.534802(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 26, 31, 174, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of georgesung/llama2_7b_chat_uncensored## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model georgesung/llama2_7b_chat_uncensored on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-09-17T06:01:34.534802(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
95853e2795a5b1f007514461c7c0188f3c618695
# Dataset Card for Evaluation run of NYTK/PULI-GPTrio

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/NYTK/PULI-GPTrio
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [NYTK/PULI-GPTrio](https://huggingface.co/NYTK/PULI-GPTrio) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_NYTK__PULI-GPTrio",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-16T23:53:17.046338](https://huggingface.co/datasets/open-llm-leaderboard/details_NYTK__PULI-GPTrio/blob/main/results_2023-10-16T23-53-17.046338.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.0003145973154362416,
        "em_stderr": 0.00018161379468840182,
        "f1": 0.04027160234899333,
        "f1_stderr": 0.0010684123060497892,
        "acc": 0.2926621002134429,
        "acc_stderr": 0.00813543177136788
    },
    "harness|drop|3": {
        "em": 0.0003145973154362416,
        "em_stderr": 0.00018161379468840182,
        "f1": 0.04027160234899333,
        "f1_stderr": 0.0010684123060497892
    },
    "harness|gsm8k|5": {
        "acc": 0.0075815011372251705,
        "acc_stderr": 0.002389281512077206
    },
    "harness|winogrande|5": {
        "acc": 0.5777426992896606,
        "acc_stderr": 0.013881582030658552
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
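As a usage note (a hedged sketch based on the configuration list in this card's metadata, which declares a "results" configuration with a "latest" split), the aggregated metrics can also be loaded directly:

```python
from datasets import load_dataset

# "results" is the aggregated configuration declared in the dataset metadata;
# its "latest" split points at the most recent evaluation run.
results = load_dataset(
    "open-llm-leaderboard/details_NYTK__PULI-GPTrio",
    "results",
    split="latest",
)
print(results[0])  # one row of aggregated metrics for the latest run
```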
open-llm-leaderboard/details_NYTK__PULI-GPTrio
[ "region:us" ]
2023-08-18T10:08:40+00:00
{"pretty_name": "Evaluation run of NYTK/PULI-GPTrio", "dataset_summary": "Dataset automatically created during the evaluation run of model [NYTK/PULI-GPTrio](https://huggingface.co/NYTK/PULI-GPTrio) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_NYTK__PULI-GPTrio\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-16T23:53:17.046338](https://huggingface.co/datasets/open-llm-leaderboard/details_NYTK__PULI-GPTrio/blob/main/results_2023-10-16T23-53-17.046338.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0003145973154362416,\n \"em_stderr\": 0.00018161379468840182,\n \"f1\": 0.04027160234899333,\n \"f1_stderr\": 0.0010684123060497892,\n \"acc\": 0.2926621002134429,\n \"acc_stderr\": 0.00813543177136788\n },\n \"harness|drop|3\": {\n \"em\": 0.0003145973154362416,\n \"em_stderr\": 0.00018161379468840182,\n \"f1\": 0.04027160234899333,\n \"f1_stderr\": 0.0010684123060497892\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0075815011372251705,\n \"acc_stderr\": 0.002389281512077206\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5777426992896606,\n \"acc_stderr\": 0.013881582030658552\n }\n}\n```", "repo_url": "https://huggingface.co/NYTK/PULI-GPTrio", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|arc:challenge|25_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_16T23_53_17.046338", "path": ["**/details_harness|drop|3_2023-10-16T23-53-17.046338.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-16T23-53-17.046338.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_16T23_53_17.046338", "path": ["**/details_harness|gsm8k|5_2023-10-16T23-53-17.046338.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-16T23-53-17.046338.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hellaswag|10_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:35:43.843244.parquet", 
"**/details_harness|hendrycksTest-management|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:35:43.843244.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T19:35:43.843244.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:35:43.843244.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": 
"2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T19:35:43.843244.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T19:35:43.843244.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_16T23_53_17.046338", "path": ["**/details_harness|winogrande|5_2023-10-16T23-53-17.046338.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-16T23-53-17.046338.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T19_35_43.843244", "path": ["results_2023-07-19T19:35:43.843244.parquet"]}, {"split": "2023_10_16T23_53_17.046338", "path": ["results_2023-10-16T23-53-17.046338.parquet"]}, {"split": "latest", "path": ["results_2023-10-16T23-53-17.046338.parquet"]}]}]}
2023-10-16T22:53:29+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of NYTK/PULI-GPTrio ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model NYTK/PULI-GPTrio on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-16T23:53:17.046338 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of NYTK/PULI-GPTrio", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model NYTK/PULI-GPTrio on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-16T23:53:17.046338(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of NYTK/PULI-GPTrio", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model NYTK/PULI-GPTrio on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-16T23:53:17.046338(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 18, 31, 166, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of NYTK/PULI-GPTrio## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model NYTK/PULI-GPTrio on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-16T23:53:17.046338(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
e8639ba5979f99f4cca0787e2a3ee9d7c8f08911
# Dataset Card for Evaluation run of The-Face-Of-Goonery/huginnv1.2 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/The-Face-Of-Goonery/huginnv1.2 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [The-Face-Of-Goonery/huginnv1.2](https://huggingface.co/The-Face-Of-Goonery/huginnv1.2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_The-Face-Of-Goonery__huginnv1.2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-15T03:33:52.049592](https://huggingface.co/datasets/open-llm-leaderboard/details_The-Face-Of-Goonery__huginnv1.2/blob/main/results_2023-10-15T03-33-52.049592.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.13464765100671142, "em_stderr": 0.0034957110748356193, "f1": 0.20755138422818709, "f1_stderr": 0.0036341951060626636, "acc": 0.421953322606337, "acc_stderr": 0.01004266408410234 }, "harness|drop|3": { "em": 0.13464765100671142, "em_stderr": 0.0034957110748356193, "f1": 0.20755138422818709, "f1_stderr": 0.0036341951060626636 }, "harness|gsm8k|5": { "acc": 0.09173616376042457, "acc_stderr": 0.00795094214833933 }, "harness|winogrande|5": { "acc": 0.7521704814522494, "acc_stderr": 0.01213438601986535 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_The-Face-Of-Goonery__huginnv1.2
[ "region:us" ]
2023-08-18T10:08:49+00:00
{"pretty_name": "Evaluation run of The-Face-Of-Goonery/huginnv1.2", "dataset_summary": "Dataset automatically created during the evaluation run of model [The-Face-Of-Goonery/huginnv1.2](https://huggingface.co/The-Face-Of-Goonery/huginnv1.2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_The-Face-Of-Goonery__huginnv1.2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-15T03:33:52.049592](https://huggingface.co/datasets/open-llm-leaderboard/details_The-Face-Of-Goonery__huginnv1.2/blob/main/results_2023-10-15T03-33-52.049592.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.13464765100671142,\n \"em_stderr\": 0.0034957110748356193,\n \"f1\": 0.20755138422818709,\n \"f1_stderr\": 0.0036341951060626636,\n \"acc\": 0.421953322606337,\n \"acc_stderr\": 0.01004266408410234\n },\n \"harness|drop|3\": {\n \"em\": 0.13464765100671142,\n \"em_stderr\": 0.0034957110748356193,\n \"f1\": 0.20755138422818709,\n \"f1_stderr\": 0.0036341951060626636\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.09173616376042457,\n \"acc_stderr\": 0.00795094214833933\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7521704814522494,\n \"acc_stderr\": 0.01213438601986535\n }\n}\n```", "repo_url": "https://huggingface.co/The-Face-Of-Goonery/huginnv1.2", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|arc:challenge|25_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_15T03_33_52.049592", "path": ["**/details_harness|drop|3_2023-10-15T03-33-52.049592.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-15T03-33-52.049592.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_15T03_33_52.049592", "path": ["**/details_harness|gsm8k|5_2023-10-15T03-33-52.049592.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-15T03-33-52.049592.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hellaswag|10_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T23:01:31.106825.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T23:01:31.106825.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-09T23:01:31.106825.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-09T23:01:31.106825.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-09T23:01:31.106825.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_15T03_33_52.049592", "path": ["**/details_harness|winogrande|5_2023-10-15T03-33-52.049592.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-15T03-33-52.049592.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_09T23_01_31.106825", "path": ["results_2023-08-09T23:01:31.106825.parquet"]}, {"split": "2023_10_15T03_33_52.049592", "path": ["results_2023-10-15T03-33-52.049592.parquet"]}, {"split": "latest", "path": ["results_2023-10-15T03-33-52.049592.parquet"]}]}]}
2023-10-15T02:34:04+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of The-Face-Of-Goonery/huginnv1.2 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model The-Face-Of-Goonery/huginnv1.2 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-15T03:33:52.049592 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of The-Face-Of-Goonery/huginnv1.2", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model The-Face-Of-Goonery/huginnv1.2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T03:33:52.049592(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of The-Face-Of-Goonery/huginnv1.2", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model The-Face-Of-Goonery/huginnv1.2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T03:33:52.049592(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 24, 31, 172, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of The-Face-Of-Goonery/huginnv1.2## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model The-Face-Of-Goonery/huginnv1.2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-15T03:33:52.049592(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
c98406002c54eec39f3d6503f027295c56dc48b2
# Dataset Card for Evaluation run of The-Face-Of-Goonery/Chronos-Beluga-v2-13bfp16 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/The-Face-Of-Goonery/Chronos-Beluga-v2-13bfp16 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [The-Face-Of-Goonery/Chronos-Beluga-v2-13bfp16](https://huggingface.co/The-Face-Of-Goonery/Chronos-Beluga-v2-13bfp16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_The-Face-Of-Goonery__Chronos-Beluga-v2-13bfp16", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-09-18T19:11:05.927806](https://huggingface.co/datasets/open-llm-leaderboard/details_The-Face-Of-Goonery__Chronos-Beluga-v2-13bfp16/blob/main/results_2023-09-18T19-11-05.927806.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.1196518456375839, "em_stderr": 0.0033237364616341856, "f1": 0.18612311241610655, "f1_stderr": 0.003456805841321019, "acc": 0.3921054253509362, "acc_stderr": 0.009071968047164727 }, "harness|drop|3": { "em": 0.1196518456375839, "em_stderr": 0.0033237364616341856, "f1": 0.18612311241610655, "f1_stderr": 0.003456805841321019 }, "harness|gsm8k|5": { "acc": 0.04624715693707354, "acc_stderr": 0.005784991662691891 }, "harness|winogrande|5": { "acc": 0.7379636937647988, "acc_stderr": 0.012358944431637561 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_The-Face-Of-Goonery__Chronos-Beluga-v2-13bfp16
[ "region:us" ]
2023-08-18T10:08:57+00:00
{"pretty_name": "Evaluation run of The-Face-Of-Goonery/Chronos-Beluga-v2-13bfp16", "dataset_summary": "Dataset automatically created during the evaluation run of model [The-Face-Of-Goonery/Chronos-Beluga-v2-13bfp16](https://huggingface.co/The-Face-Of-Goonery/Chronos-Beluga-v2-13bfp16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_The-Face-Of-Goonery__Chronos-Beluga-v2-13bfp16\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-09-18T19:11:05.927806](https://huggingface.co/datasets/open-llm-leaderboard/details_The-Face-Of-Goonery__Chronos-Beluga-v2-13bfp16/blob/main/results_2023-09-18T19-11-05.927806.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.1196518456375839,\n \"em_stderr\": 0.0033237364616341856,\n \"f1\": 0.18612311241610655,\n \"f1_stderr\": 0.003456805841321019,\n \"acc\": 0.3921054253509362,\n \"acc_stderr\": 0.009071968047164727\n },\n \"harness|drop|3\": {\n \"em\": 0.1196518456375839,\n \"em_stderr\": 0.0033237364616341856,\n \"f1\": 0.18612311241610655,\n \"f1_stderr\": 0.003456805841321019\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.04624715693707354,\n \"acc_stderr\": 0.005784991662691891\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7379636937647988,\n \"acc_stderr\": 0.012358944431637561\n }\n}\n```", "repo_url": "https://huggingface.co/The-Face-Of-Goonery/Chronos-Beluga-v2-13bfp16", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|arc:challenge|25_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_18T19_11_05.927806", "path": ["**/details_harness|drop|3_2023-09-18T19-11-05.927806.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-18T19-11-05.927806.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_18T19_11_05.927806", "path": ["**/details_harness|gsm8k|5_2023-09-18T19-11-05.927806.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-09-18T19-11-05.927806.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hellaswag|10_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hellaswag|10_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T10:53:07.090454.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-09T10:53:07.090454.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T10:53:07.090454.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-09T10:53:07.090454.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": 
"2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-09T10:53:07.090454.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-09T10:53:07.090454.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_18T19_11_05.927806", "path": ["**/details_harness|winogrande|5_2023-09-18T19-11-05.927806.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-18T19-11-05.927806.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_09T10_53_07.090454", "path": ["results_2023-08-09T10:53:07.090454.parquet"]}, {"split": "2023_09_18T19_11_05.927806", "path": ["results_2023-09-18T19-11-05.927806.parquet"]}, {"split": "latest", "path": ["results_2023-09-18T19-11-05.927806.parquet"]}]}]}
2023-09-18T18:11:18+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of The-Face-Of-Goonery/Chronos-Beluga-v2-13bfp16

## Dataset Description

- Homepage: 
- Repository: URL
- Paper: 
- Leaderboard: URL
- Point of Contact: clementine@URL

### Dataset Summary

Dataset automatically created during the evaluation run of model The-Face-Of-Goonery/Chronos-Beluga-v2-13bfp16 on the Open LLM Leaderboard.

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following:

## Latest results

These are the latest results from run 2023-09-18T19:11:05.927806 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

### Supported Tasks and Leaderboards

### Languages

## Dataset Structure

### Data Instances

### Data Fields

### Data Splits

## Dataset Creation

### Curation Rationale

### Source Data

#### Initial Data Collection and Normalization

#### Who are the source language producers?

### Annotations

#### Annotation process

#### Who are the annotators?

### Personal and Sensitive Information

## Considerations for Using the Data

### Social Impact of Dataset

### Discussion of Biases

### Other Known Limitations

## Additional Information

### Dataset Curators

### Licensing Information

### Contributions
[ "# Dataset Card for Evaluation run of The-Face-Of-Goonery/Chronos-Beluga-v2-13bfp16", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model The-Face-Of-Goonery/Chronos-Beluga-v2-13bfp16 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-18T19:11:05.927806(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of The-Face-Of-Goonery/Chronos-Beluga-v2-13bfp16", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model The-Face-Of-Goonery/Chronos-Beluga-v2-13bfp16 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-18T19:11:05.927806(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 33, 31, 181, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of The-Face-Of-Goonery/Chronos-Beluga-v2-13bfp16## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model The-Face-Of-Goonery/Chronos-Beluga-v2-13bfp16 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-09-18T19:11:05.927806(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
3279d56b2a5c8831eafc9cad1d97228400ea7fbd
# Dataset Card for Evaluation run of The-Face-Of-Goonery/Huginn-13b-FP16

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/The-Face-Of-Goonery/Huginn-13b-FP16
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [The-Face-Of-Goonery/Huginn-13b-FP16](https://huggingface.co/The-Face-Of-Goonery/Huginn-13b-FP16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_The-Face-Of-Goonery__Huginn-13b-FP16",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-17T23:23:06.857366](https://huggingface.co/datasets/open-llm-leaderboard/details_The-Face-Of-Goonery__Huginn-13b-FP16/blob/main/results_2023-10-17T23-23-06.857366.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.33609479865771813,
        "em_stderr": 0.004837529011799984,
        "f1": 0.41438129194631024,
        "f1_stderr": 0.004663694796707255,
        "acc": 0.39019449213217305,
        "acc_stderr": 0.008985955021249931
    },
    "harness|drop|3": {
        "em": 0.33609479865771813,
        "em_stderr": 0.004837529011799984,
        "f1": 0.41438129194631024,
        "f1_stderr": 0.004663694796707255
    },
    "harness|gsm8k|5": {
        "acc": 0.043214556482183475,
        "acc_stderr": 0.005600987515237852
    },
    "harness|winogrande|5": {
        "acc": 0.7371744277821626,
        "acc_stderr": 0.01237092252726201
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
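As a complement to the loading example above, here is a small sketch (an illustration, not from the original card) of pinning an analysis to one specific run by requesting its timestamped split; the `harness_gsm8k_5` configuration and the split name below are taken from this dataset's config metadata:

```python
from datasets import load_dataset

# Sketch: instead of "latest", request the split named after the run's
# timestamp to keep results reproducible against one particular eval.
gsm8k_details = load_dataset(
    "open-llm-leaderboard/details_The-Face-Of-Goonery__Huginn-13b-FP16",
    "harness_gsm8k_5",
    split="2023_10_17T23_23_06.857366",
)
print(len(gsm8k_details))  # number of per-example records in that run
```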
open-llm-leaderboard/details_The-Face-Of-Goonery__Huginn-13b-FP16
[ "region:us" ]
2023-08-18T10:09:06+00:00
{"pretty_name": "Evaluation run of The-Face-Of-Goonery/Huginn-13b-FP16", "dataset_summary": "Dataset automatically created during the evaluation run of model [The-Face-Of-Goonery/Huginn-13b-FP16](https://huggingface.co/The-Face-Of-Goonery/Huginn-13b-FP16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_The-Face-Of-Goonery__Huginn-13b-FP16\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-17T23:23:06.857366](https://huggingface.co/datasets/open-llm-leaderboard/details_The-Face-Of-Goonery__Huginn-13b-FP16/blob/main/results_2023-10-17T23-23-06.857366.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.33609479865771813,\n \"em_stderr\": 0.004837529011799984,\n \"f1\": 0.41438129194631024,\n \"f1_stderr\": 0.004663694796707255,\n \"acc\": 0.39019449213217305,\n \"acc_stderr\": 0.008985955021249931\n },\n \"harness|drop|3\": {\n \"em\": 0.33609479865771813,\n \"em_stderr\": 0.004837529011799984,\n \"f1\": 0.41438129194631024,\n \"f1_stderr\": 0.004663694796707255\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.043214556482183475,\n \"acc_stderr\": 0.005600987515237852\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7371744277821626,\n \"acc_stderr\": 0.01237092252726201\n }\n}\n```", "repo_url": "https://huggingface.co/The-Face-Of-Goonery/Huginn-13b-FP16", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|arc:challenge|25_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_17T23_23_06.857366", "path": ["**/details_harness|drop|3_2023-10-17T23-23-06.857366.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-17T23-23-06.857366.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_17T23_23_06.857366", "path": ["**/details_harness|gsm8k|5_2023-10-17T23-23-06.857366.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-17T23-23-06.857366.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hellaswag|10_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-09T13:30:49.317288.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T13:30:49.317288.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T13:30:49.317288.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-09T13:30:49.317288.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-09T13:30:49.317288.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-09T13:30:49.317288.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_17T23_23_06.857366", "path": ["**/details_harness|winogrande|5_2023-10-17T23-23-06.857366.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-17T23-23-06.857366.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_09T13_30_49.317288", "path": ["results_2023-08-09T13:30:49.317288.parquet"]}, {"split": "2023_10_17T23_23_06.857366", "path": ["results_2023-10-17T23-23-06.857366.parquet"]}, {"split": "latest", "path": ["results_2023-10-17T23-23-06.857366.parquet"]}]}]}
2023-10-17T22:23:19+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of The-Face-Of-Goonery/Huginn-13b-FP16 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model The-Face-Of-Goonery/Huginn-13b-FP16 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-17T23:23:06.857366 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
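The stripped card text above says "To load the details from a run, you can for instance do the following:" but the snippet itself is not reproduced in this field. A minimal sketch follows; the repository id is inferred from the leaderboard's usual `details_<org>__<model>` naming pattern, while the `harness_winogrande_5` config and its `latest` split are taken from this record's metadata, so treat the exact identifiers as assumptions rather than part of the original card.

```python
# Minimal sketch (assumed repo id, inferred from the "details_<org>__<model>" pattern):
# load the winogrande details for The-Face-Of-Goonery/Huginn-13b-FP16 and inspect one row.
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_The-Face-Of-Goonery__Huginn-13b-FP16",
    "harness_winogrande_5",   # one of the per-task configs listed in this record's metadata
    split="latest",           # split name declared for this config in the metadata
)
print(data[0])
```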
[ "# Dataset Card for Evaluation run of The-Face-Of-Goonery/Huginn-13b-FP16", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model The-Face-Of-Goonery/Huginn-13b-FP16 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-17T23:23:06.857366(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of The-Face-Of-Goonery/Huginn-13b-FP16", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model The-Face-Of-Goonery/Huginn-13b-FP16 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-17T23:23:06.857366(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 27, 31, 175, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of The-Face-Of-Goonery/Huginn-13b-FP16## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model The-Face-Of-Goonery/Huginn-13b-FP16 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-17T23:23:06.857366(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
cd1f7c1edebe7e5ef0d1c6a8376e74158d0eea76
# Dataset Card for Evaluation run of facebook/opt-6.7b

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/facebook/opt-6.7b
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [facebook/opt-6.7b](https://huggingface.co/facebook/opt-6.7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_facebook__opt-6.7b",
    "harness_winogrande_5",
    split="train")
```

## Latest results

These are the [latest results from run 2023-10-17T21:53:46.155351](https://huggingface.co/datasets/open-llm-leaderboard/details_facebook__opt-6.7b/blob/main/results_2023-10-17T21-53-46.155351.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.001153523489932886,
        "em_stderr": 0.0003476179896857095,
        "f1": 0.04860633389261755,
        "f1_stderr": 0.0011917611903016134,
        "acc": 0.33484115648110635,
        "acc_stderr": 0.008018147669729529
    },
    "harness|drop|3": {
        "em": 0.001153523489932886,
        "em_stderr": 0.0003476179896857095,
        "f1": 0.04860633389261755,
        "f1_stderr": 0.0011917611903016134
    },
    "harness|gsm8k|5": {
        "acc": 0.009855951478392721,
        "acc_stderr": 0.002721076577041659
    },
    "harness|winogrande|5": {
        "acc": 0.65982636148382,
        "acc_stderr": 0.013315218762417397
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
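Beyond the per-task example shown in the card above, the metadata attached to this record also declares an aggregated "results" configuration with a "latest" split. A minimal sketch of reading it, added here for illustration and not part of the original card text:

```python
# Minimal sketch (not from the original card): read the aggregated "results"
# configuration and its "latest" split, both declared in this record's metadata.
from datasets import load_dataset

results = load_dataset(
    "open-llm-leaderboard/details_facebook__opt-6.7b",
    "results",
    split="latest",
)
print(results[0])  # one row of aggregated metrics for the most recent run
```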
open-llm-leaderboard/details_facebook__opt-6.7b
[ "region:us" ]
2023-08-18T10:09:15+00:00
{"pretty_name": "Evaluation run of facebook/opt-6.7b", "dataset_summary": "Dataset automatically created during the evaluation run of model [facebook/opt-6.7b](https://huggingface.co/facebook/opt-6.7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_facebook__opt-6.7b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-17T21:53:46.155351](https://huggingface.co/datasets/open-llm-leaderboard/details_facebook__opt-6.7b/blob/main/results_2023-10-17T21-53-46.155351.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.001153523489932886,\n \"em_stderr\": 0.0003476179896857095,\n \"f1\": 0.04860633389261755,\n \"f1_stderr\": 0.0011917611903016134,\n \"acc\": 0.33484115648110635,\n \"acc_stderr\": 0.008018147669729529\n },\n \"harness|drop|3\": {\n \"em\": 0.001153523489932886,\n \"em_stderr\": 0.0003476179896857095,\n \"f1\": 0.04860633389261755,\n \"f1_stderr\": 0.0011917611903016134\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.009855951478392721,\n \"acc_stderr\": 0.002721076577041659\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.65982636148382,\n \"acc_stderr\": 0.013315218762417397\n }\n}\n```", "repo_url": "https://huggingface.co/facebook/opt-6.7b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|arc:challenge|25_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_17T21_53_46.155351", "path": ["**/details_harness|drop|3_2023-10-17T21-53-46.155351.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-17T21-53-46.155351.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_17T21_53_46.155351", "path": ["**/details_harness|gsm8k|5_2023-10-17T21-53-46.155351.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-17T21-53-46.155351.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hellaswag|10_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T17:02:51.507873.parquet", 
"**/details_harness|hendrycksTest-management|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T17:02:51.507873.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T17:02:51.507873.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T17:02:51.507873.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": 
"2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T17:02:51.507873.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T17:02:51.507873.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_17T21_53_46.155351", "path": ["**/details_harness|winogrande|5_2023-10-17T21-53-46.155351.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-17T21-53-46.155351.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T17_02_51.507873", "path": ["results_2023-07-19T17:02:51.507873.parquet"]}, {"split": "2023_10_17T21_53_46.155351", "path": ["results_2023-10-17T21-53-46.155351.parquet"]}, {"split": "latest", "path": ["results_2023-10-17T21-53-46.155351.parquet"]}]}]}
2023-10-17T20:53:58+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of facebook/opt-6.7b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model facebook/opt-6.7b on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-17T21:53:46.155351 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
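The card text above refers to a load snippet that is not reproduced in this processed rendering. Below is a minimal sketch of what such a load might look like; the repository id is inferred from the "details_<org>__<model>" naming pattern used by the other evaluation-detail repos in this dump, and the config name is one listed in this record's metadata, so treat both as assumptions rather than confirmed values.

```python
from datasets import load_dataset

# Hypothetical repo id, inferred from the naming pattern of the other
# Open LLM Leaderboard detail datasets in this dump (an assumption).
repo_id = "open-llm-leaderboard/details_facebook__opt-6.7b"

# "harness_winogrande_5" appears in this record's config list; "latest"
# is the split the card says always points to the most recent run.
data = load_dataset(repo_id, "harness_winogrande_5", split="latest")
print(data)
```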
[ "# Dataset Card for Evaluation run of facebook/opt-6.7b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/opt-6.7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-17T21:53:46.155351(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of facebook/opt-6.7b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/opt-6.7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-17T21:53:46.155351(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 16, 31, 164, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of facebook/opt-6.7b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/opt-6.7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-17T21:53:46.155351(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
9115728ec0e9df50b586e55498c69f6d2f9c663b
# Dataset Card for Evaluation run of facebook/opt-30b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/facebook/opt-30b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [facebook/opt-30b](https://huggingface.co/facebook/opt-30b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 122 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 5 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_facebook__opt-30b", "harness_gsm8k_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-02T22:44:52.889942](https://huggingface.co/datasets/open-llm-leaderboard/details_facebook__opt-30b/blob/main/results_2023-12-02T22-44-52.889942.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.021986353297952996, "acc_stderr": 0.004039162758110029 }, "harness|gsm8k|5": { "acc": 0.021986353297952996, "acc_stderr": 0.004039162758110029 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
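The snippet in the card loads a single configuration. As a complementary sketch (not part of the original card), the following shows how one might enumerate the per-task configurations and pull the aggregated "results" config at its "latest" split; the repo id, config name, and split name are taken from the card and metadata above, while everything else is illustrative.

```python
from datasets import get_dataset_config_names, load_dataset

repo_id = "open-llm-leaderboard/details_facebook__opt-30b"

# Enumerate the per-task configurations published for this evaluation run
# (the card above says there are 122 of them).
configs = get_dataset_config_names(repo_id)
print(len(configs), configs[:5])

# The "results" config aggregates all runs; the "latest" split points to
# the most recent one, per the card's description.
results = load_dataset(repo_id, "results", split="latest")
print(results)
```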
open-llm-leaderboard/details_facebook__opt-30b
[ "region:us" ]
2023-08-18T10:09:23+00:00
{"pretty_name": "Evaluation run of facebook/opt-30b", "dataset_summary": "Dataset automatically created during the evaluation run of model [facebook/opt-30b](https://huggingface.co/facebook/opt-30b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 122 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 5 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_facebook__opt-30b\",\n\t\"harness_gsm8k_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-02T22:44:52.889942](https://huggingface.co/datasets/open-llm-leaderboard/details_facebook__opt-30b/blob/main/results_2023-12-02T22-44-52.889942.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.021986353297952996,\n \"acc_stderr\": 0.004039162758110029\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.021986353297952996,\n \"acc_stderr\": 0.004039162758110029\n }\n}\n```", "repo_url": "https://huggingface.co/facebook/opt-30b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|arc:challenge|25_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_09T12_38_44.623374", "path": ["**/details_harness|drop|3_2023-09-09T12-38-44.623374.parquet"]}, {"split": "2023_09_17T20_41_44.085857", "path": ["**/details_harness|drop|3_2023-09-17T20-41-44.085857.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-17T20-41-44.085857.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_09T12_38_44.623374", "path": ["**/details_harness|gsm8k|5_2023-09-09T12-38-44.623374.parquet"]}, {"split": "2023_09_17T20_41_44.085857", "path": ["**/details_harness|gsm8k|5_2023-09-17T20-41-44.085857.parquet"]}, {"split": "2023_12_02T22_44_52.889942", "path": ["**/details_harness|gsm8k|5_2023-12-02T22-44-52.889942.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-02T22-44-52.889942.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hellaswag|10_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T22:24:27.765109.parquet", 
"**/details_harness|hendrycksTest-management|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T22:24:27.765109.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T22:24:27.765109.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T22:24:27.765109.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": 
"2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T22:24:27.765109.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T22:24:27.765109.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_09T12_38_44.623374", "path": ["**/details_harness|winogrande|5_2023-09-09T12-38-44.623374.parquet"]}, {"split": "2023_09_17T20_41_44.085857", "path": ["**/details_harness|winogrande|5_2023-09-17T20-41-44.085857.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-17T20-41-44.085857.parquet"]}]}, {"config_name": "original_mmlu_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:abstract_algebra|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:anatomy|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:astronomy|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:business_ethics|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:clinical_knowledge|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:college_biology|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:college_chemistry|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:college_computer_science|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:college_mathematics|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:college_medicine|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:college_physics|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:computer_security|5_2023-08-28T20:58:48.353368.parquet", 
"**/details_original|mmlu:conceptual_physics|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:econometrics|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:electrical_engineering|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:elementary_mathematics|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:formal_logic|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:global_facts|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:high_school_biology|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:high_school_chemistry|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:high_school_computer_science|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:high_school_european_history|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:high_school_geography|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:high_school_government_and_politics|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:high_school_macroeconomics|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:high_school_mathematics|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:high_school_microeconomics|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:high_school_physics|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:high_school_psychology|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:high_school_statistics|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:high_school_us_history|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:high_school_world_history|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:human_aging|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:human_sexuality|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:international_law|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:jurisprudence|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:logical_fallacies|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:machine_learning|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:management|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:marketing|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:medical_genetics|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:miscellaneous|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:moral_disputes|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:moral_scenarios|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:nutrition|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:philosophy|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:prehistory|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:professional_accounting|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:professional_law|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:professional_medicine|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:professional_psychology|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:public_relations|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:security_studies|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:sociology|5_2023-08-28T20:58:48.353368.parquet", 
"**/details_original|mmlu:us_foreign_policy|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:virology|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:world_religions|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:abstract_algebra|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:anatomy|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:astronomy|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:business_ethics|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:clinical_knowledge|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:college_biology|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:college_chemistry|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:college_computer_science|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:college_mathematics|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:college_medicine|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:college_physics|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:computer_security|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:conceptual_physics|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:econometrics|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:electrical_engineering|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:elementary_mathematics|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:formal_logic|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:global_facts|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:high_school_biology|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:high_school_chemistry|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:high_school_computer_science|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:high_school_european_history|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:high_school_geography|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:high_school_government_and_politics|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:high_school_macroeconomics|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:high_school_mathematics|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:high_school_microeconomics|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:high_school_physics|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:high_school_psychology|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:high_school_statistics|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:high_school_us_history|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:high_school_world_history|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:human_aging|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:human_sexuality|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:international_law|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:jurisprudence|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:logical_fallacies|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:machine_learning|5_2023-08-28T20:58:48.353368.parquet", 
"**/details_original|mmlu:management|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:marketing|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:medical_genetics|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:miscellaneous|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:moral_disputes|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:moral_scenarios|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:nutrition|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:philosophy|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:prehistory|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:professional_accounting|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:professional_law|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:professional_medicine|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:professional_psychology|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:public_relations|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:security_studies|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:sociology|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:us_foreign_policy|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:virology|5_2023-08-28T20:58:48.353368.parquet", "**/details_original|mmlu:world_religions|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_abstract_algebra_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:abstract_algebra|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:abstract_algebra|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_anatomy_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:anatomy|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:anatomy|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_astronomy_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:astronomy|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:astronomy|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_business_ethics_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:business_ethics|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:business_ethics|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_clinical_knowledge_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:clinical_knowledge|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:clinical_knowledge|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_college_biology_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:college_biology|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:college_biology|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_college_chemistry_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": 
["**/details_original|mmlu:college_chemistry|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:college_chemistry|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_college_computer_science_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:college_computer_science|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:college_computer_science|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_college_mathematics_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:college_mathematics|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:college_mathematics|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_college_medicine_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:college_medicine|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:college_medicine|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_college_physics_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:college_physics|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:college_physics|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_computer_security_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:computer_security|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:computer_security|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_conceptual_physics_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:conceptual_physics|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:conceptual_physics|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_econometrics_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:econometrics|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:econometrics|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_electrical_engineering_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:electrical_engineering|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:electrical_engineering|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_elementary_mathematics_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:elementary_mathematics|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:elementary_mathematics|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_formal_logic_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:formal_logic|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:formal_logic|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_global_facts_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": 
["**/details_original|mmlu:global_facts|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:global_facts|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_high_school_biology_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:high_school_biology|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_biology|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_high_school_chemistry_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:high_school_chemistry|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_chemistry|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_high_school_computer_science_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:high_school_computer_science|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_computer_science|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_high_school_european_history_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:high_school_european_history|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_european_history|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_high_school_geography_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:high_school_geography|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_geography|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:high_school_government_and_politics|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_government_and_politics|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:high_school_macroeconomics|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_macroeconomics|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_high_school_mathematics_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:high_school_mathematics|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_mathematics|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_high_school_microeconomics_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:high_school_microeconomics|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_microeconomics|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_high_school_physics_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:high_school_physics|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": 
["**/details_original|mmlu:high_school_physics|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_high_school_psychology_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:high_school_psychology|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_psychology|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_high_school_statistics_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:high_school_statistics|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_statistics|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_high_school_us_history_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:high_school_us_history|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_us_history|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_high_school_world_history_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:high_school_world_history|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_world_history|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_human_aging_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:human_aging|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:human_aging|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_human_sexuality_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:human_sexuality|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:human_sexuality|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_international_law_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:international_law|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:international_law|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_jurisprudence_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:jurisprudence|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:jurisprudence|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_logical_fallacies_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:logical_fallacies|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:logical_fallacies|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_machine_learning_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:machine_learning|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:machine_learning|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_management_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:management|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": 
["**/details_original|mmlu:management|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_marketing_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:marketing|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:marketing|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_medical_genetics_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:medical_genetics|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:medical_genetics|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_miscellaneous_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:miscellaneous|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:miscellaneous|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_moral_disputes_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:moral_disputes|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:moral_disputes|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_moral_scenarios_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:moral_scenarios|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:moral_scenarios|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_nutrition_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:nutrition|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:nutrition|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_philosophy_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:philosophy|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:philosophy|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_prehistory_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:prehistory|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:prehistory|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_professional_accounting_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:professional_accounting|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:professional_accounting|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_professional_law_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:professional_law|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:professional_law|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_professional_medicine_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:professional_medicine|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:professional_medicine|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_professional_psychology_5", 
"data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:professional_psychology|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:professional_psychology|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_public_relations_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:public_relations|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:public_relations|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_security_studies_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:security_studies|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:security_studies|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_sociology_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:sociology|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:sociology|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_us_foreign_policy_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:us_foreign_policy|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:us_foreign_policy|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_virology_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:virology|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:virology|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "original_mmlu_world_religions_5", "data_files": [{"split": "2023_08_28T20_58_48.353368", "path": ["**/details_original|mmlu:world_religions|5_2023-08-28T20:58:48.353368.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:world_religions|5_2023-08-28T20:58:48.353368.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T22_24_27.765109", "path": ["results_2023-07-19T22:24:27.765109.parquet"]}, {"split": "2023_08_28T20_58_48.353368", "path": ["results_2023-08-28T20:58:48.353368.parquet"]}, {"split": "2023_09_09T12_38_44.623374", "path": ["results_2023-09-09T12-38-44.623374.parquet"]}, {"split": "2023_09_17T20_41_44.085857", "path": ["results_2023-09-17T20-41-44.085857.parquet"]}, {"split": "2023_12_02T22_44_52.889942", "path": ["results_2023-12-02T22-44-52.889942.parquet"]}, {"split": "latest", "path": ["results_2023-12-02T22-44-52.889942.parquet"]}]}]}
2023-12-02T22:45:00+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of facebook/opt-30b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model facebook/opt-30b on the Open LLM Leaderboard. The dataset is composed of 122 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 5 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-02T22:44:52.889942 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
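The card text above mentions loading the per-run details, but the accompanying code block was stripped in this flattened rendering. A minimal sketch of that load, assuming the repository id follows the leaderboard's usual `open-llm-leaderboard/details_<org>__<model>` naming (the id itself is not quoted in this record); the config name and the "latest" split are taken from the config metadata listed earlier:

```python
from datasets import load_dataset

# Sketch only: the repository id is assumed from the leaderboard naming
# convention for facebook/opt-30b details, not quoted from this record.
# "original_mmlu_world_religions_5" and the "latest" split both appear in
# the record's config metadata above.
data = load_dataset(
    "open-llm-leaderboard/details_facebook__opt-30b",  # assumed repo id
    "original_mmlu_world_religions_5",
    split="latest",
)
print(data)
```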
[ "# Dataset Card for Evaluation run of facebook/opt-30b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/opt-30b on the Open LLM Leaderboard.\n\nThe dataset is composed of 122 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 5 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-02T22:44:52.889942(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of facebook/opt-30b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/opt-30b on the Open LLM Leaderboard.\n\nThe dataset is composed of 122 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 5 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-02T22:44:52.889942(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 15, 31, 164, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of facebook/opt-30b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/opt-30b on the Open LLM Leaderboard.\n\nThe dataset is composed of 122 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 5 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-02T22:44:52.889942(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
d0ecc3eef4569d0f793e2dee24adb234274d161a
# Dataset Card for Evaluation run of None ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/None - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [None](https://huggingface.co/None) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 119 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_facebook__galactica-30b", "original_mmlu_world_religions_5", split="train") ``` ## Latest results These are the [latest results from run 2023-08-28T21:10:05.029353](https://huggingface.co/datasets/open-llm-leaderboard/details_facebook__galactica-30b/blob/main/results_2023-08-28T21%3A10%3A05.029353.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.4666487872974609, "acc_stderr": 0.036447127375734134 }, "original|mmlu:abstract_algebra|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446 }, "original|mmlu:anatomy|5": { "acc": 0.5407407407407407, "acc_stderr": 0.04304979692464242 }, "original|mmlu:astronomy|5": { "acc": 0.506578947368421, "acc_stderr": 0.040685900502249704 }, "original|mmlu:business_ethics|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332 }, "original|mmlu:clinical_knowledge|5": { "acc": 0.5471698113207547, "acc_stderr": 0.030635627957961823 }, "original|mmlu:college_biology|5": { "acc": 0.5694444444444444, "acc_stderr": 0.04140685639111502 }, "original|mmlu:college_chemistry|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504 }, "original|mmlu:college_computer_science|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025 }, "original|mmlu:college_mathematics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235 }, "original|mmlu:college_medicine|5": { "acc": 0.5028901734104047, "acc_stderr": 0.03812400565974834 }, "original|mmlu:college_physics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.04690650298201943 }, "original|mmlu:computer_security|5": { "acc": 0.65, "acc_stderr": 0.0479372485441102 }, "original|mmlu:conceptual_physics|5": { "acc": 0.4765957446808511, "acc_stderr": 0.03265019475033581 }, "original|mmlu:econometrics|5": { "acc": 0.3684210526315789, "acc_stderr": 0.04537815354939391 }, "original|mmlu:electrical_engineering|5": { "acc": 0.5862068965517241, "acc_stderr": 0.04104269211806232 }, "original|mmlu:elementary_mathematics|5": { "acc": 0.31216931216931215, "acc_stderr": 0.023865206836972585 }, "original|mmlu:formal_logic|5": { "acc": 0.2698412698412698, "acc_stderr": 0.03970158273235172 }, "original|mmlu:global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316 }, "original|mmlu:high_school_biology|5": 
{ "acc": 0.5548387096774193, "acc_stderr": 0.028272410186214906 }, "original|mmlu:high_school_chemistry|5": { "acc": 0.39408866995073893, "acc_stderr": 0.034381579670365446 }, "original|mmlu:high_school_computer_science|5": { "acc": 0.48, "acc_stderr": 0.05021167315686781 }, "original|mmlu:high_school_european_history|5": { "acc": 0.5818181818181818, "acc_stderr": 0.03851716319398393 }, "original|mmlu:high_school_geography|5": { "acc": 0.5353535353535354, "acc_stderr": 0.03553436368828063 }, "original|mmlu:high_school_government_and_politics|5": { "acc": 0.5595854922279793, "acc_stderr": 0.03582724530036093 }, "original|mmlu:high_school_macroeconomics|5": { "acc": 0.4230769230769231, "acc_stderr": 0.025049197876042338 }, "original|mmlu:high_school_mathematics|5": { "acc": 0.2518518518518518, "acc_stderr": 0.026466117538959905 }, "original|mmlu:high_school_microeconomics|5": { "acc": 0.4579831932773109, "acc_stderr": 0.03236361111951941 }, "original|mmlu:high_school_physics|5": { "acc": 0.3576158940397351, "acc_stderr": 0.03913453431177258 }, "original|mmlu:high_school_psychology|5": { "acc": 0.6293577981651376, "acc_stderr": 0.02070745816435298 }, "original|mmlu:high_school_statistics|5": { "acc": 0.33796296296296297, "acc_stderr": 0.03225941352631295 }, "original|mmlu:high_school_us_history|5": { "acc": 0.4411764705882353, "acc_stderr": 0.03484941514429231 }, "original|mmlu:high_school_world_history|5": { "acc": 0.6033755274261603, "acc_stderr": 0.03184399873811225 }, "original|mmlu:human_aging|5": { "acc": 0.5515695067264574, "acc_stderr": 0.033378837362550984 }, "original|mmlu:human_sexuality|5": { "acc": 0.5801526717557252, "acc_stderr": 0.043285772152629715 }, "original|mmlu:international_law|5": { "acc": 0.6528925619834711, "acc_stderr": 0.04345724570292534 }, "original|mmlu:jurisprudence|5": { "acc": 0.5185185185185185, "acc_stderr": 0.04830366024635331 }, "original|mmlu:logical_fallacies|5": { "acc": 0.50920245398773, "acc_stderr": 0.03927705600787443 }, "original|mmlu:machine_learning|5": { "acc": 0.33035714285714285, "acc_stderr": 0.04464285714285712 }, "original|mmlu:management|5": { "acc": 0.6019417475728155, "acc_stderr": 0.048467482539772386 }, "original|mmlu:marketing|5": { "acc": 0.6965811965811965, "acc_stderr": 0.03011821010694266 }, "original|mmlu:medical_genetics|5": { "acc": 0.58, "acc_stderr": 0.049604496374885836 }, "original|mmlu:miscellaneous|5": { "acc": 0.4942528735632184, "acc_stderr": 0.01787878232612923 }, "original|mmlu:moral_disputes|5": { "acc": 0.4479768786127168, "acc_stderr": 0.026772990653361826 }, "original|mmlu:moral_scenarios|5": { "acc": 0.2435754189944134, "acc_stderr": 0.014355911964767864 }, "original|mmlu:nutrition|5": { "acc": 0.5196078431372549, "acc_stderr": 0.028607893699576073 }, "original|mmlu:philosophy|5": { "acc": 0.48231511254019294, "acc_stderr": 0.02838032284907713 }, "original|mmlu:prehistory|5": { "acc": 0.5401234567901234, "acc_stderr": 0.027731022753539277 }, "original|mmlu:professional_accounting|5": { "acc": 0.35815602836879434, "acc_stderr": 0.028602085862759422 }, "original|mmlu:professional_law|5": { "acc": 0.34028683181225555, "acc_stderr": 0.012101217610223794 }, "original|mmlu:professional_medicine|5": { "acc": 0.44485294117647056, "acc_stderr": 0.03018753206032939 }, "original|mmlu:professional_psychology|5": { "acc": 0.5130718954248366, "acc_stderr": 0.020220920829626916 }, "original|mmlu:public_relations|5": { "acc": 0.5272727272727272, "acc_stderr": 0.04782001791380061 }, "original|mmlu:security_studies|5": { "acc": 
0.4, "acc_stderr": 0.03136250240935893 }, "original|mmlu:sociology|5": { "acc": 0.5422885572139303, "acc_stderr": 0.035228658640995975 }, "original|mmlu:us_foreign_policy|5": { "acc": 0.49, "acc_stderr": 0.05024183937956912 }, "original|mmlu:virology|5": { "acc": 0.4397590361445783, "acc_stderr": 0.03864139923699121 }, "original|mmlu:world_religions|5": { "acc": 0.4327485380116959, "acc_stderr": 0.03799978644370607 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_facebook__galactica-30b
[ "region:us" ]
2023-08-18T10:09:32+00:00
{"pretty_name": "Evaluation run of None", "dataset_summary": "Dataset automatically created during the evaluation run of model [None](https://huggingface.co/None) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 119 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_facebook__galactica-30b\",\n\t\"original_mmlu_world_religions_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-08-28T21:10:05.029353](https://huggingface.co/datasets/open-llm-leaderboard/details_facebook__galactica-30b/blob/main/results_2023-08-28T21%3A10%3A05.029353.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.4666487872974609,\n \"acc_stderr\": 0.036447127375734134\n },\n \"original|mmlu:abstract_algebra|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446\n },\n \"original|mmlu:anatomy|5\": {\n \"acc\": 0.5407407407407407,\n \"acc_stderr\": 0.04304979692464242\n },\n \"original|mmlu:astronomy|5\": {\n \"acc\": 0.506578947368421,\n \"acc_stderr\": 0.040685900502249704\n },\n \"original|mmlu:business_ethics|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620332\n },\n \"original|mmlu:clinical_knowledge|5\": {\n \"acc\": 0.5471698113207547,\n \"acc_stderr\": 0.030635627957961823\n },\n \"original|mmlu:college_biology|5\": {\n \"acc\": 0.5694444444444444,\n \"acc_stderr\": 0.04140685639111502\n },\n \"original|mmlu:college_chemistry|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504\n },\n \"original|mmlu:college_computer_science|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025\n },\n \"original|mmlu:college_mathematics|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235\n },\n \"original|mmlu:college_medicine|5\": {\n \"acc\": 0.5028901734104047,\n \"acc_stderr\": 0.03812400565974834\n },\n \"original|mmlu:college_physics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.04690650298201943\n },\n \"original|mmlu:computer_security|5\": {\n \"acc\": 0.65,\n \"acc_stderr\": 0.0479372485441102\n },\n \"original|mmlu:conceptual_physics|5\": {\n \"acc\": 0.4765957446808511,\n \"acc_stderr\": 0.03265019475033581\n },\n \"original|mmlu:econometrics|5\": {\n \"acc\": 0.3684210526315789,\n \"acc_stderr\": 0.04537815354939391\n },\n \"original|mmlu:electrical_engineering|5\": {\n \"acc\": 0.5862068965517241,\n \"acc_stderr\": 0.04104269211806232\n },\n \"original|mmlu:elementary_mathematics|5\": {\n \"acc\": 0.31216931216931215,\n \"acc_stderr\": 0.023865206836972585\n },\n \"original|mmlu:formal_logic|5\": {\n \"acc\": 0.2698412698412698,\n \"acc_stderr\": 0.03970158273235172\n },\n \"original|mmlu:global_facts|5\": {\n \"acc\": 
0.31,\n \"acc_stderr\": 0.04648231987117316\n },\n \"original|mmlu:high_school_biology|5\": {\n \"acc\": 0.5548387096774193,\n \"acc_stderr\": 0.028272410186214906\n },\n \"original|mmlu:high_school_chemistry|5\": {\n \"acc\": 0.39408866995073893,\n \"acc_stderr\": 0.034381579670365446\n },\n \"original|mmlu:high_school_computer_science|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.05021167315686781\n },\n \"original|mmlu:high_school_european_history|5\": {\n \"acc\": 0.5818181818181818,\n \"acc_stderr\": 0.03851716319398393\n },\n \"original|mmlu:high_school_geography|5\": {\n \"acc\": 0.5353535353535354,\n \"acc_stderr\": 0.03553436368828063\n },\n \"original|mmlu:high_school_government_and_politics|5\": {\n \"acc\": 0.5595854922279793,\n \"acc_stderr\": 0.03582724530036093\n },\n \"original|mmlu:high_school_macroeconomics|5\": {\n \"acc\": 0.4230769230769231,\n \"acc_stderr\": 0.025049197876042338\n },\n \"original|mmlu:high_school_mathematics|5\": {\n \"acc\": 0.2518518518518518,\n \"acc_stderr\": 0.026466117538959905\n },\n \"original|mmlu:high_school_microeconomics|5\": {\n \"acc\": 0.4579831932773109,\n \"acc_stderr\": 0.03236361111951941\n },\n \"original|mmlu:high_school_physics|5\": {\n \"acc\": 0.3576158940397351,\n \"acc_stderr\": 0.03913453431177258\n },\n \"original|mmlu:high_school_psychology|5\": {\n \"acc\": 0.6293577981651376,\n \"acc_stderr\": 0.02070745816435298\n },\n \"original|mmlu:high_school_statistics|5\": {\n \"acc\": 0.33796296296296297,\n \"acc_stderr\": 0.03225941352631295\n },\n \"original|mmlu:high_school_us_history|5\": {\n \"acc\": 0.4411764705882353,\n \"acc_stderr\": 0.03484941514429231\n },\n \"original|mmlu:high_school_world_history|5\": {\n \"acc\": 0.6033755274261603,\n \"acc_stderr\": 0.03184399873811225\n },\n \"original|mmlu:human_aging|5\": {\n \"acc\": 0.5515695067264574,\n \"acc_stderr\": 0.033378837362550984\n },\n \"original|mmlu:human_sexuality|5\": {\n \"acc\": 0.5801526717557252,\n \"acc_stderr\": 0.043285772152629715\n },\n \"original|mmlu:international_law|5\": {\n \"acc\": 0.6528925619834711,\n \"acc_stderr\": 0.04345724570292534\n },\n \"original|mmlu:jurisprudence|5\": {\n \"acc\": 0.5185185185185185,\n \"acc_stderr\": 0.04830366024635331\n },\n \"original|mmlu:logical_fallacies|5\": {\n \"acc\": 0.50920245398773,\n \"acc_stderr\": 0.03927705600787443\n },\n \"original|mmlu:machine_learning|5\": {\n \"acc\": 0.33035714285714285,\n \"acc_stderr\": 0.04464285714285712\n },\n \"original|mmlu:management|5\": {\n \"acc\": 0.6019417475728155,\n \"acc_stderr\": 0.048467482539772386\n },\n \"original|mmlu:marketing|5\": {\n \"acc\": 0.6965811965811965,\n \"acc_stderr\": 0.03011821010694266\n },\n \"original|mmlu:medical_genetics|5\": {\n \"acc\": 0.58,\n \"acc_stderr\": 0.049604496374885836\n },\n \"original|mmlu:miscellaneous|5\": {\n \"acc\": 0.4942528735632184,\n \"acc_stderr\": 0.01787878232612923\n },\n \"original|mmlu:moral_disputes|5\": {\n \"acc\": 0.4479768786127168,\n \"acc_stderr\": 0.026772990653361826\n },\n \"original|mmlu:moral_scenarios|5\": {\n \"acc\": 0.2435754189944134,\n \"acc_stderr\": 0.014355911964767864\n },\n \"original|mmlu:nutrition|5\": {\n \"acc\": 0.5196078431372549,\n \"acc_stderr\": 0.028607893699576073\n },\n \"original|mmlu:philosophy|5\": {\n \"acc\": 0.48231511254019294,\n \"acc_stderr\": 0.02838032284907713\n },\n \"original|mmlu:prehistory|5\": {\n \"acc\": 0.5401234567901234,\n \"acc_stderr\": 0.027731022753539277\n },\n \"original|mmlu:professional_accounting|5\": {\n \"acc\": 0.35815602836879434,\n 
\"acc_stderr\": 0.028602085862759422\n },\n \"original|mmlu:professional_law|5\": {\n \"acc\": 0.34028683181225555,\n \"acc_stderr\": 0.012101217610223794\n },\n \"original|mmlu:professional_medicine|5\": {\n \"acc\": 0.44485294117647056,\n \"acc_stderr\": 0.03018753206032939\n },\n \"original|mmlu:professional_psychology|5\": {\n \"acc\": 0.5130718954248366,\n \"acc_stderr\": 0.020220920829626916\n },\n \"original|mmlu:public_relations|5\": {\n \"acc\": 0.5272727272727272,\n \"acc_stderr\": 0.04782001791380061\n },\n \"original|mmlu:security_studies|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.03136250240935893\n },\n \"original|mmlu:sociology|5\": {\n \"acc\": 0.5422885572139303,\n \"acc_stderr\": 0.035228658640995975\n },\n \"original|mmlu:us_foreign_policy|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956912\n },\n \"original|mmlu:virology|5\": {\n \"acc\": 0.4397590361445783,\n \"acc_stderr\": 0.03864139923699121\n },\n \"original|mmlu:world_religions|5\": {\n \"acc\": 0.4327485380116959,\n \"acc_stderr\": 0.03799978644370607\n }\n}\n```", "repo_url": "https://huggingface.co/None", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|arc:challenge|25_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hellaswag|10_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T22:22:24.729383.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T22:22:24.729383.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T22:22:24.729383.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T22:22:24.729383.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": 
["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": 
["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": 
["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": 
["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T22:22:24.729383.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T22:22:24.729383.parquet"]}]}, {"config_name": "original_mmlu_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:abstract_algebra|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:anatomy|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:astronomy|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:business_ethics|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:clinical_knowledge|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:college_biology|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:college_chemistry|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:college_computer_science|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:college_mathematics|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:college_medicine|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:college_physics|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:computer_security|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:conceptual_physics|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:econometrics|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:electrical_engineering|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:elementary_mathematics|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:formal_logic|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:global_facts|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:high_school_biology|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:high_school_chemistry|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:high_school_computer_science|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:high_school_european_history|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:high_school_geography|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:high_school_government_and_politics|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:high_school_macroeconomics|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:high_school_mathematics|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:high_school_microeconomics|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:high_school_physics|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:high_school_psychology|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:high_school_statistics|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:high_school_us_history|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:high_school_world_history|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:human_aging|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:human_sexuality|5_2023-08-28T21:10:05.029353.parquet", 
"**/details_original|mmlu:international_law|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:jurisprudence|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:logical_fallacies|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:machine_learning|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:management|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:marketing|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:medical_genetics|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:miscellaneous|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:moral_disputes|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:moral_scenarios|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:nutrition|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:philosophy|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:prehistory|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:professional_accounting|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:professional_law|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:professional_medicine|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:professional_psychology|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:public_relations|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:security_studies|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:sociology|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:us_foreign_policy|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:virology|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:world_religions|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:abstract_algebra|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:anatomy|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:astronomy|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:business_ethics|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:clinical_knowledge|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:college_biology|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:college_chemistry|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:college_computer_science|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:college_mathematics|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:college_medicine|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:college_physics|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:computer_security|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:conceptual_physics|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:econometrics|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:electrical_engineering|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:elementary_mathematics|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:formal_logic|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:global_facts|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:high_school_biology|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:high_school_chemistry|5_2023-08-28T21:10:05.029353.parquet", 
"**/details_original|mmlu:high_school_computer_science|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:high_school_european_history|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:high_school_geography|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:high_school_government_and_politics|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:high_school_macroeconomics|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:high_school_mathematics|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:high_school_microeconomics|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:high_school_physics|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:high_school_psychology|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:high_school_statistics|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:high_school_us_history|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:high_school_world_history|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:human_aging|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:human_sexuality|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:international_law|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:jurisprudence|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:logical_fallacies|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:machine_learning|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:management|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:marketing|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:medical_genetics|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:miscellaneous|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:moral_disputes|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:moral_scenarios|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:nutrition|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:philosophy|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:prehistory|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:professional_accounting|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:professional_law|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:professional_medicine|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:professional_psychology|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:public_relations|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:security_studies|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:sociology|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:us_foreign_policy|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:virology|5_2023-08-28T21:10:05.029353.parquet", "**/details_original|mmlu:world_religions|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_abstract_algebra_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:abstract_algebra|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:abstract_algebra|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_anatomy_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": 
["**/details_original|mmlu:anatomy|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:anatomy|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_astronomy_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:astronomy|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:astronomy|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_business_ethics_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:business_ethics|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:business_ethics|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_clinical_knowledge_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:clinical_knowledge|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:clinical_knowledge|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_college_biology_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:college_biology|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:college_biology|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_college_chemistry_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:college_chemistry|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:college_chemistry|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_college_computer_science_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:college_computer_science|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:college_computer_science|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_college_mathematics_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:college_mathematics|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:college_mathematics|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_college_medicine_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:college_medicine|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:college_medicine|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_college_physics_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:college_physics|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:college_physics|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_computer_security_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:computer_security|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:computer_security|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_conceptual_physics_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:conceptual_physics|5_2023-08-28T21:10:05.029353.parquet"]}, 
{"split": "latest", "path": ["**/details_original|mmlu:conceptual_physics|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_econometrics_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:econometrics|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:econometrics|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_electrical_engineering_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:electrical_engineering|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:electrical_engineering|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_elementary_mathematics_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:elementary_mathematics|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:elementary_mathematics|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_formal_logic_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:formal_logic|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:formal_logic|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_global_facts_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:global_facts|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:global_facts|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_high_school_biology_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:high_school_biology|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_biology|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_high_school_chemistry_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:high_school_chemistry|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_chemistry|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_high_school_computer_science_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:high_school_computer_science|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_computer_science|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_high_school_european_history_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:high_school_european_history|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_european_history|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_high_school_geography_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:high_school_geography|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_geography|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": 
["**/details_original|mmlu:high_school_government_and_politics|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_government_and_politics|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:high_school_macroeconomics|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_macroeconomics|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_high_school_mathematics_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:high_school_mathematics|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_mathematics|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_high_school_microeconomics_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:high_school_microeconomics|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_microeconomics|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_high_school_physics_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:high_school_physics|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_physics|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_high_school_psychology_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:high_school_psychology|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_psychology|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_high_school_statistics_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:high_school_statistics|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_statistics|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_high_school_us_history_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:high_school_us_history|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_us_history|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_high_school_world_history_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:high_school_world_history|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:high_school_world_history|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_human_aging_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:human_aging|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:human_aging|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_human_sexuality_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:human_sexuality|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:human_sexuality|5_2023-08-28T21:10:05.029353.parquet"]}]}, 
{"config_name": "original_mmlu_international_law_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:international_law|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:international_law|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_jurisprudence_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:jurisprudence|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:jurisprudence|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_logical_fallacies_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:logical_fallacies|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:logical_fallacies|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_machine_learning_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:machine_learning|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:machine_learning|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_management_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:management|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:management|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_marketing_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:marketing|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:marketing|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_medical_genetics_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:medical_genetics|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:medical_genetics|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_miscellaneous_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:miscellaneous|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:miscellaneous|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_moral_disputes_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:moral_disputes|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:moral_disputes|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_moral_scenarios_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:moral_scenarios|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:moral_scenarios|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_nutrition_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:nutrition|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:nutrition|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_philosophy_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": 
["**/details_original|mmlu:philosophy|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:philosophy|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_prehistory_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:prehistory|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:prehistory|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_professional_accounting_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:professional_accounting|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:professional_accounting|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_professional_law_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:professional_law|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:professional_law|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_professional_medicine_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:professional_medicine|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:professional_medicine|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_professional_psychology_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:professional_psychology|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:professional_psychology|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_public_relations_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:public_relations|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:public_relations|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_security_studies_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:security_studies|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:security_studies|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_sociology_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:sociology|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:sociology|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_us_foreign_policy_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:us_foreign_policy|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:us_foreign_policy|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_virology_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:virology|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["**/details_original|mmlu:virology|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "original_mmlu_world_religions_5", "data_files": [{"split": "2023_08_28T21_10_05.029353", "path": ["**/details_original|mmlu:world_religions|5_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", 
"path": ["**/details_original|mmlu:world_religions|5_2023-08-28T21:10:05.029353.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T22_22_24.729383", "path": ["results_2023-07-19T22:22:24.729383.parquet"]}, {"split": "2023_08_28T21_10_05.029353", "path": ["results_2023-08-28T21:10:05.029353.parquet"]}, {"split": "latest", "path": ["results_2023-08-28T21:10:05.029353.parquet"]}]}]}
2023-08-28T20:10:19+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of None ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model None on the Open LLM Leaderboard. The dataset is composed of 119 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-08-28T21:10:05.029353 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
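The loading snippet referenced just above ("To load the details from a run, you can for instance do the following:") was dropped when this card text was flattened. Below is a minimal sketch following the pattern used by the other evaluation-run cards in this dump; the repository id is a hypothetical placeholder, since this card does not record which model was evaluated, while the configuration and split names are taken from the metadata block above:

```python
from datasets import load_dataset

# Hypothetical repository id: this card's model is "None", so substitute the real
# "open-llm-leaderboard/details_<org>__<model>" repository you want to inspect.
repo_id = "open-llm-leaderboard/details_<org>__<model>"

# Configuration and split names as declared in the metadata above.
data = load_dataset(
    repo_id,
    "harness_truthfulqa_mc_0",  # one of the per-task configurations
    split="latest",             # or a timestamped split such as "2023_07_19T22_22_24.729383"
)
```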
[ "# Dataset Card for Evaluation run of None", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model None on the Open LLM Leaderboard.\n\nThe dataset is composed of 119 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-08-28T21:10:05.029353(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of None", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model None on the Open LLM Leaderboard.\n\nThe dataset is composed of 119 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-08-28T21:10:05.029353(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 11, 31, 159, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of None## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model None on the Open LLM Leaderboard.\n\nThe dataset is composed of 119 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-08-28T21:10:05.029353(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
7eafe07415bbfff155c64a79290ee1e373c28603
# Dataset Card for Evaluation run of facebook/xglm-7.5B

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/facebook/xglm-7.5B
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [facebook/xglm-7.5B](https://huggingface.co/facebook/xglm-7.5B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_facebook__xglm-7.5B",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-18T03:17:20.065422](https://huggingface.co/datasets/open-llm-leaderboard/details_facebook__xglm-7.5B/blob/main/results_2023-10-18T03-17-20.065422.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.13905201342281878,
        "em_stderr": 0.0035433720039612262,
        "f1": 0.18580851510067117,
        "f1_stderr": 0.0037071149655913006,
        "acc": 0.294744170711231,
        "acc_stderr": 0.007574609231774763
    },
    "harness|drop|3": {
        "em": 0.13905201342281878,
        "em_stderr": 0.0035433720039612262,
        "f1": 0.18580851510067117,
        "f1_stderr": 0.0037071149655913006
    },
    "harness|gsm8k|5": {
        "acc": 0.002274450341167551,
        "acc_stderr": 0.001312157814867432
    },
    "harness|winogrande|5": {
        "acc": 0.5872138910812944,
        "acc_stderr": 0.013837060648682094
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
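The aggregated numbers quoted under "Latest results" are also stored in the separate "results" configuration mentioned in the card above. A minimal sketch for reading the most recent aggregate run, assuming the "latest" split naming used throughout these evaluation-run cards:

```python
from datasets import load_dataset

# Aggregated metrics for the most recent evaluation run of facebook/xglm-7.5B.
results = load_dataset(
    "open-llm-leaderboard/details_facebook__xglm-7.5B",
    "results",
    split="latest",
)

# Inspect the first record of the aggregated results.
print(results[0])
```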
open-llm-leaderboard/details_facebook__xglm-7.5B
[ "region:us" ]
2023-08-18T10:09:41+00:00
{"pretty_name": "Evaluation run of facebook/xglm-7.5B", "dataset_summary": "Dataset automatically created during the evaluation run of model [facebook/xglm-7.5B](https://huggingface.co/facebook/xglm-7.5B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_facebook__xglm-7.5B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-18T03:17:20.065422](https://huggingface.co/datasets/open-llm-leaderboard/details_facebook__xglm-7.5B/blob/main/results_2023-10-18T03-17-20.065422.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.13905201342281878,\n \"em_stderr\": 0.0035433720039612262,\n \"f1\": 0.18580851510067117,\n \"f1_stderr\": 0.0037071149655913006,\n \"acc\": 0.294744170711231,\n \"acc_stderr\": 0.007574609231774763\n },\n \"harness|drop|3\": {\n \"em\": 0.13905201342281878,\n \"em_stderr\": 0.0035433720039612262,\n \"f1\": 0.18580851510067117,\n \"f1_stderr\": 0.0037071149655913006\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.002274450341167551,\n \"acc_stderr\": 0.001312157814867432\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5872138910812944,\n \"acc_stderr\": 0.013837060648682094\n }\n}\n```", "repo_url": "https://huggingface.co/facebook/xglm-7.5B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|arc:challenge|25_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_18T03_17_20.065422", "path": ["**/details_harness|drop|3_2023-10-18T03-17-20.065422.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-18T03-17-20.065422.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_18T03_17_20.065422", "path": ["**/details_harness|gsm8k|5_2023-10-18T03-17-20.065422.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-18T03-17-20.065422.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hellaswag|10_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T15:31:59.100861.parquet", 
"**/details_harness|hendrycksTest-management|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T15:31:59.100861.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T15:31:59.100861.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T15:31:59.100861.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": 
"2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T15:31:59.100861.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T15:31:59.100861.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_18T03_17_20.065422", "path": ["**/details_harness|winogrande|5_2023-10-18T03-17-20.065422.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-18T03-17-20.065422.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T15_31_59.100861", "path": ["results_2023-07-19T15:31:59.100861.parquet"]}, {"split": "2023_10_18T03_17_20.065422", "path": ["results_2023-10-18T03-17-20.065422.parquet"]}, {"split": "latest", "path": ["results_2023-10-18T03-17-20.065422.parquet"]}]}]}
2023-10-18T02:17:32+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of facebook/xglm-7.5B ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model facebook/xglm-7.5B on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-18T03:17:20.065422 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of facebook/xglm-7.5B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/xglm-7.5B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-18T03:17:20.065422(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of facebook/xglm-7.5B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/xglm-7.5B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-18T03:17:20.065422(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 17, 31, 165, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of facebook/xglm-7.5B## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/xglm-7.5B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-18T03:17:20.065422(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
1a835bf4d7713f0cf2df54d99224059e88f35526
# Dataset Card for Evaluation run of facebook/xglm-564M

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/facebook/xglm-564M
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [facebook/xglm-564M](https://huggingface.co/facebook/xglm-564M) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_facebook__xglm-564M",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-15T23:39:39.394377](https://huggingface.co/datasets/open-llm-leaderboard/details_facebook__xglm-564M/blob/main/results_2023-10-15T23-39-39.394377.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.013422818791946308,
        "em_stderr": 0.0011784931108563684,
        "f1": 0.060359689597315525,
        "f1_stderr": 0.0017160396766447692,
        "acc": 0.2623842654231489,
        "acc_stderr": 0.007675207819463649
    },
    "harness|drop|3": {
        "em": 0.013422818791946308,
        "em_stderr": 0.0011784931108563684,
        "f1": 0.060359689597315525,
        "f1_stderr": 0.0017160396766447692
    },
    "harness|gsm8k|5": {
        "acc": 0.002274450341167551,
        "acc_stderr": 0.001312157814867416
    },
    "harness|winogrande|5": {
        "acc": 0.5224940805051302,
        "acc_stderr": 0.014038257824059881
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
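The card's example loads a single task configuration with `split="train"`; the summary also mentions timestamp-named splits and an aggregated `results` configuration. A short sketch of those variants, assuming the split layout shown in this record's metadata (the timestamped split name below is the drop run listed there; the `results`/`latest` combination mirrors the sibling xglm-7.5B record and is an assumption for this one):

```python
from datasets import load_dataset

repo = "open-llm-leaderboard/details_facebook__xglm-564M"

# Details for one specific run, addressed by its timestamped split name
drop_details = load_dataset(repo, "harness_drop_3", split="2023_10_15T23_39_39.394377")

# Aggregated metrics; the "results" config and "latest" split are assumed to
# follow the same layout as the other leaderboard detail repositories
results = load_dataset(repo, "results", split="latest")
```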
open-llm-leaderboard/details_facebook__xglm-564M
[ "region:us" ]
2023-08-18T10:09:51+00:00
{"pretty_name": "Evaluation run of facebook/xglm-564M", "dataset_summary": "Dataset automatically created during the evaluation run of model [facebook/xglm-564M](https://huggingface.co/facebook/xglm-564M) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_facebook__xglm-564M\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-15T23:39:39.394377](https://huggingface.co/datasets/open-llm-leaderboard/details_facebook__xglm-564M/blob/main/results_2023-10-15T23-39-39.394377.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.013422818791946308,\n \"em_stderr\": 0.0011784931108563684,\n \"f1\": 0.060359689597315525,\n \"f1_stderr\": 0.0017160396766447692,\n \"acc\": 0.2623842654231489,\n \"acc_stderr\": 0.007675207819463649\n },\n \"harness|drop|3\": {\n \"em\": 0.013422818791946308,\n \"em_stderr\": 0.0011784931108563684,\n \"f1\": 0.060359689597315525,\n \"f1_stderr\": 0.0017160396766447692\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.002274450341167551,\n \"acc_stderr\": 0.001312157814867416\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5224940805051302,\n \"acc_stderr\": 0.014038257824059881\n }\n}\n```", "repo_url": "https://huggingface.co/facebook/xglm-564M", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|arc:challenge|25_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_15T23_39_39.394377", "path": ["**/details_harness|drop|3_2023-10-15T23-39-39.394377.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-15T23-39-39.394377.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_15T23_39_39.394377", "path": ["**/details_harness|gsm8k|5_2023-10-15T23-39-39.394377.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-15T23-39-39.394377.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hellaswag|10_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", 
"path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T14:24:31.422133.parquet", 
"**/details_harness|hendrycksTest-management|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T14:24:31.422133.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T14:24:31.422133.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T14:24:31.422133.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": 
"2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T14:24:31.422133.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T14:24:31.422133.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_15T23_39_39.394377", "path": ["**/details_harness|winogrande|5_2023-10-15T23-39-39.394377.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-15T23-39-39.394377.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T14_24_31.422133", "path": ["results_2023-07-19T14:24:31.422133.parquet"]}, {"split": "2023_10_15T23_39_39.394377", "path": ["results_2023-10-15T23-39-39.394377.parquet"]}, {"split": "latest", "path": ["results_2023-10-15T23-39-39.394377.parquet"]}]}]}
2023-10-15T22:39:50+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of facebook/xglm-564M ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model facebook/xglm-564M on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-15T23:39:39.394377 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of facebook/xglm-564M", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/xglm-564M on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T23:39:39.394377(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of facebook/xglm-564M", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/xglm-564M on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T23:39:39.394377(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 17, 31, 165, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of facebook/xglm-564M## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/xglm-564M on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-15T23:39:39.394377(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
6bdd72d60b41d67b67180f26e8517ca65d51f417
# Dataset Card for Evaluation run of facebook/opt-350m ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/facebook/opt-350m - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [facebook/opt-350m](https://huggingface.co/facebook/opt-350m) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_facebook__opt-350m", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-29T05:40:05.173534](https://huggingface.co/datasets/open-llm-leaderboard/details_facebook__opt-350m/blob/main/results_2023-10-29T05-40-05.173534.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0006291946308724832, "em_stderr": 0.0002568002749723937, "f1": 0.04159815436241622, "f1_stderr": 0.0011509154641292957, "acc": 0.26473650543660054, "acc_stderr": 0.007773698717815887 }, "harness|drop|3": { "em": 0.0006291946308724832, "em_stderr": 0.0002568002749723937, "f1": 0.04159815436241622, "f1_stderr": 0.0011509154641292957 }, "harness|gsm8k|5": { "acc": 0.003032600454890068, "acc_stderr": 0.0015145735612245468 }, "harness|winogrande|5": { "acc": 0.526440410418311, "acc_stderr": 0.014032823874407229 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
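The same loading pattern also covers the aggregated numbers shown above: the card's metadata declares a "results" configuration whose "latest" split points to the most recent run, so, assuming that naming, a minimal sketch is:

```python
from datasets import load_dataset

# Aggregated metrics for facebook/opt-350m; the "results" config and the
# "latest" split are the names declared in this card's metadata.
results = load_dataset(
    "open-llm-leaderboard/details_facebook__opt-350m",
    "results",
    split="latest",
)
print(results[0])  # one row holding the aggregated metrics of the latest run
```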
open-llm-leaderboard/details_facebook__opt-350m
[ "region:us" ]
2023-08-18T10:09:59+00:00
{"pretty_name": "Evaluation run of facebook/opt-350m", "dataset_summary": "Dataset automatically created during the evaluation run of model [facebook/opt-350m](https://huggingface.co/facebook/opt-350m) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_facebook__opt-350m\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-29T05:40:05.173534](https://huggingface.co/datasets/open-llm-leaderboard/details_facebook__opt-350m/blob/main/results_2023-10-29T05-40-05.173534.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0006291946308724832,\n \"em_stderr\": 0.0002568002749723937,\n \"f1\": 0.04159815436241622,\n \"f1_stderr\": 0.0011509154641292957,\n \"acc\": 0.26473650543660054,\n \"acc_stderr\": 0.007773698717815887\n },\n \"harness|drop|3\": {\n \"em\": 0.0006291946308724832,\n \"em_stderr\": 0.0002568002749723937,\n \"f1\": 0.04159815436241622,\n \"f1_stderr\": 0.0011509154641292957\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.003032600454890068,\n \"acc_stderr\": 0.0015145735612245468\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.526440410418311,\n \"acc_stderr\": 0.014032823874407229\n }\n}\n```", "repo_url": "https://huggingface.co/facebook/opt-350m", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|arc:challenge|25_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_24T09_18_34.693204", "path": ["**/details_harness|drop|3_2023-10-24T09-18-34.693204.parquet"]}, {"split": "2023_10_29T05_40_05.173534", "path": ["**/details_harness|drop|3_2023-10-29T05-40-05.173534.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-29T05-40-05.173534.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_24T09_18_34.693204", "path": ["**/details_harness|gsm8k|5_2023-10-24T09-18-34.693204.parquet"]}, {"split": "2023_10_29T05_40_05.173534", "path": ["**/details_harness|gsm8k|5_2023-10-29T05-40-05.173534.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-29T05-40-05.173534.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": 
["**/details_harness|hellaswag|10_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-24T09:55:50.700214.parquet", 
"**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T09:55:50.700214.parquet", 
"**/details_harness|hendrycksTest-global_facts|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T09:55:50.700214.parquet", 
"**/details_harness|hendrycksTest-virology|5_2023-07-24T09:55:50.700214.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T09:55:50.700214.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-24T09:55:50.700214.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-24T09:55:50.700214.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_24T09_18_34.693204", "path": ["**/details_harness|winogrande|5_2023-10-24T09-18-34.693204.parquet"]}, {"split": "2023_10_29T05_40_05.173534", "path": ["**/details_harness|winogrande|5_2023-10-29T05-40-05.173534.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-29T05-40-05.173534.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_24T09_55_50.700214", "path": ["results_2023-07-24T09:55:50.700214.parquet"]}, {"split": "2023_10_24T09_18_34.693204", "path": ["results_2023-10-24T09-18-34.693204.parquet"]}, {"split": "2023_10_29T05_40_05.173534", "path": ["results_2023-10-29T05-40-05.173534.parquet"]}, {"split": "latest", "path": ["results_2023-10-29T05-40-05.173534.parquet"]}]}]}
2023-10-29T05:40:13+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of facebook/opt-350m ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model facebook/opt-350m on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-29T05:40:05.173534 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of facebook/opt-350m", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/opt-350m on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-29T05:40:05.173534(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of facebook/opt-350m", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/opt-350m on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-29T05:40:05.173534(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 16, 31, 164, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of facebook/opt-350m## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/opt-350m on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-29T05:40:05.173534(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
2da8f48099b4da366066b9a598be99b8d5a0957a
# Dataset Card for Evaluation run of facebook/xglm-4.5B ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/facebook/xglm-4.5B - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [facebook/xglm-4.5B](https://huggingface.co/facebook/xglm-4.5B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_facebook__xglm-4.5B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-18T23:03:33.960699](https://huggingface.co/datasets/open-llm-leaderboard/details_facebook__xglm-4.5B/blob/main/results_2023-10-18T23-03-33.960699.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.06480704697986577, "em_stderr": 0.0025211656446620548, "f1": 0.11480180369127503, "f1_stderr": 0.002765932447728658, "acc": 0.27580178712796344, "acc_stderr": 0.007648043341953835 }, "harness|drop|3": { "em": 0.06480704697986577, "em_stderr": 0.0025211656446620548, "f1": 0.11480180369127503, "f1_stderr": 0.002765932447728658 }, "harness|gsm8k|5": { "acc": 0.002274450341167551, "acc_stderr": 0.001312157814867431 }, "harness|winogrande|5": { "acc": 0.5493291239147593, "acc_stderr": 0.013983928869040239 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_facebook__xglm-4.5B
[ "region:us" ]
2023-08-18T10:10:08+00:00
{"pretty_name": "Evaluation run of facebook/xglm-4.5B", "dataset_summary": "Dataset automatically created during the evaluation run of model [facebook/xglm-4.5B](https://huggingface.co/facebook/xglm-4.5B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_facebook__xglm-4.5B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-18T23:03:33.960699](https://huggingface.co/datasets/open-llm-leaderboard/details_facebook__xglm-4.5B/blob/main/results_2023-10-18T23-03-33.960699.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.06480704697986577,\n \"em_stderr\": 0.0025211656446620548,\n \"f1\": 0.11480180369127503,\n \"f1_stderr\": 0.002765932447728658,\n \"acc\": 0.27580178712796344,\n \"acc_stderr\": 0.007648043341953835\n },\n \"harness|drop|3\": {\n \"em\": 0.06480704697986577,\n \"em_stderr\": 0.0025211656446620548,\n \"f1\": 0.11480180369127503,\n \"f1_stderr\": 0.002765932447728658\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.002274450341167551,\n \"acc_stderr\": 0.001312157814867431\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5493291239147593,\n \"acc_stderr\": 0.013983928869040239\n }\n}\n```", "repo_url": "https://huggingface.co/facebook/xglm-4.5B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|arc:challenge|25_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_18T23_03_33.960699", "path": ["**/details_harness|drop|3_2023-10-18T23-03-33.960699.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-18T23-03-33.960699.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_18T23_03_33.960699", "path": ["**/details_harness|gsm8k|5_2023-10-18T23-03-33.960699.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-18T23-03-33.960699.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hellaswag|10_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T15:36:54.035673.parquet", 
"**/details_harness|hendrycksTest-management|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T15:36:54.035673.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T15:36:54.035673.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T15:36:54.035673.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": 
"2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T15:36:54.035673.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T15:36:54.035673.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_18T23_03_33.960699", "path": ["**/details_harness|winogrande|5_2023-10-18T23-03-33.960699.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-18T23-03-33.960699.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T15_36_54.035673", "path": ["results_2023-07-19T15:36:54.035673.parquet"]}, {"split": "2023_10_18T23_03_33.960699", "path": ["results_2023-10-18T23-03-33.960699.parquet"]}, {"split": "latest", "path": ["results_2023-10-18T23-03-33.960699.parquet"]}]}]}
2023-10-18T22:03:45+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of facebook/xglm-4.5B ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model facebook/xglm-4.5B on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-18T23:03:33.960699 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of facebook/xglm-4.5B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/xglm-4.5B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-18T23:03:33.960699(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of facebook/xglm-4.5B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/xglm-4.5B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-18T23:03:33.960699(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 17, 31, 165, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of facebook/xglm-4.5B## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/xglm-4.5B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-18T23:03:33.960699(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
c7e72a363affddbb7f63566a674b959c8ed01617
# Dataset Card for Evaluation run of facebook/opt-2.7b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/facebook/opt-2.7b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [facebook/opt-2.7b](https://huggingface.co/facebook/opt-2.7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_facebook__opt-2.7b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-19T03:26:05.209079](https://huggingface.co/datasets/open-llm-leaderboard/details_facebook__opt-2.7b/blob/main/results_2023-10-19T03-26-05.209079.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0010486577181208054, "em_stderr": 0.0003314581465219369, "f1": 0.04767407718120815, "f1_stderr": 0.0011986644527763738, "acc": 0.31092412335527203, "acc_stderr": 0.007478442861762106 }, "harness|drop|3": { "em": 0.0010486577181208054, "em_stderr": 0.0003314581465219369, "f1": 0.04767407718120815, "f1_stderr": 0.0011986644527763738 }, "harness|gsm8k|5": { "acc": 0.002274450341167551, "acc_stderr": 0.0013121578148673927 }, "harness|winogrande|5": { "acc": 0.6195737963693765, "acc_stderr": 0.01364472790865682 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_facebook__opt-2.7b
[ "region:us" ]
2023-08-18T10:10:17+00:00
{"pretty_name": "Evaluation run of facebook/opt-2.7b", "dataset_summary": "Dataset automatically created during the evaluation run of model [facebook/opt-2.7b](https://huggingface.co/facebook/opt-2.7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_facebook__opt-2.7b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-19T03:26:05.209079](https://huggingface.co/datasets/open-llm-leaderboard/details_facebook__opt-2.7b/blob/main/results_2023-10-19T03-26-05.209079.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0010486577181208054,\n \"em_stderr\": 0.0003314581465219369,\n \"f1\": 0.04767407718120815,\n \"f1_stderr\": 0.0011986644527763738,\n \"acc\": 0.31092412335527203,\n \"acc_stderr\": 0.007478442861762106\n },\n \"harness|drop|3\": {\n \"em\": 0.0010486577181208054,\n \"em_stderr\": 0.0003314581465219369,\n \"f1\": 0.04767407718120815,\n \"f1_stderr\": 0.0011986644527763738\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.002274450341167551,\n \"acc_stderr\": 0.0013121578148673927\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6195737963693765,\n \"acc_stderr\": 0.01364472790865682\n }\n}\n```", "repo_url": "https://huggingface.co/facebook/opt-2.7b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|arc:challenge|25_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_19T03_26_05.209079", "path": ["**/details_harness|drop|3_2023-10-19T03-26-05.209079.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-19T03-26-05.209079.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_19T03_26_05.209079", "path": ["**/details_harness|gsm8k|5_2023-10-19T03-26-05.209079.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-19T03-26-05.209079.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hellaswag|10_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T16:25:28.050181.parquet", 
"**/details_harness|hendrycksTest-management|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T16:25:28.050181.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T16:25:28.050181.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T16:25:28.050181.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": 
"2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T16:25:28.050181.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T16:25:28.050181.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_19T03_26_05.209079", "path": ["**/details_harness|winogrande|5_2023-10-19T03-26-05.209079.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-19T03-26-05.209079.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T16_25_28.050181", "path": ["results_2023-07-19T16:25:28.050181.parquet"]}, {"split": "2023_10_19T03_26_05.209079", "path": ["results_2023-10-19T03-26-05.209079.parquet"]}, {"split": "latest", "path": ["results_2023-10-19T03-26-05.209079.parquet"]}]}]}
2023-10-19T02:27:03+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of facebook/opt-2.7b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model facebook/opt-2.7b on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-19T03:26:05.209079 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each of them in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of facebook/opt-2.7b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/opt-2.7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-19T03:26:05.209079(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of facebook/opt-2.7b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/opt-2.7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-19T03:26:05.209079(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 17, 31, 165, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of facebook/opt-2.7b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/opt-2.7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-19T03:26:05.209079(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
263c5257d078fb176eb4891a41d0d60d934faffe
# Dataset Card for Evaluation run of Facebook/OPT-125M <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Facebook/OPT-125M](https://huggingface.co/Facebook/OPT-125M) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Facebook__OPT-125M", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-23T14:31:42.504661](https://huggingface.co/datasets/open-llm-leaderboard/details_Facebook__OPT-125M/blob/main/results_2024-01-23T14-31-42.504661.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.25971933524807705, "acc_stderr": 0.030727814194809005, "acc_norm": 0.26053348115143415, "acc_norm_stderr": 0.03151920852026647, "mc1": 0.23990208078335373, "mc1_stderr": 0.014948812679062133, "mc2": 0.42868550699768687, "mc2_stderr": 0.01505826026535896 }, "harness|arc:challenge|25": { "acc": 0.20392491467576793, "acc_stderr": 0.011774262478702256, "acc_norm": 0.22866894197952217, "acc_norm_stderr": 0.012272853582540792 }, "harness|hellaswag|10": { "acc": 0.2920732921728739, "acc_stderr": 0.004537865171414025, "acc_norm": 0.3143796056562438, "acc_norm_stderr": 0.00463319482579384 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.27, "acc_stderr": 0.044619604333847415, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847415 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.23703703703703705, "acc_stderr": 0.03673731683969506, "acc_norm": 0.23703703703703705, "acc_norm_stderr": 0.03673731683969506 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.2631578947368421, "acc_stderr": 0.03583496176361062, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.03583496176361062 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.21509433962264152, "acc_stderr": 0.025288394502891363, "acc_norm": 0.21509433962264152, "acc_norm_stderr": 0.025288394502891363 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2222222222222222, "acc_stderr": 0.03476590104304134, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.03476590104304134 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.26, "acc_stderr": 
0.04408440022768077, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768077 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.2023121387283237, "acc_stderr": 0.03063114553919882, "acc_norm": 0.2023121387283237, "acc_norm_stderr": 0.03063114553919882 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.37254901960784315, "acc_stderr": 0.04810840148082633, "acc_norm": 0.37254901960784315, "acc_norm_stderr": 0.04810840148082633 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.18, "acc_stderr": 0.038612291966536955, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536955 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.3148936170212766, "acc_stderr": 0.03036358219723816, "acc_norm": 0.3148936170212766, "acc_norm_stderr": 0.03036358219723816 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.039994238792813344, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813344 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2482758620689655, "acc_stderr": 0.0360010569272777, "acc_norm": 0.2482758620689655, "acc_norm_stderr": 0.0360010569272777 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2566137566137566, "acc_stderr": 0.022494510767503154, "acc_norm": 0.2566137566137566, "acc_norm_stderr": 0.022494510767503154 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.14285714285714285, "acc_stderr": 0.03129843185743809, "acc_norm": 0.14285714285714285, "acc_norm_stderr": 0.03129843185743809 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.18, "acc_stderr": 0.038612291966536934, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.3161290322580645, "acc_stderr": 0.02645087448904277, "acc_norm": 0.3161290322580645, "acc_norm_stderr": 0.02645087448904277 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.2955665024630542, "acc_stderr": 0.032104944337514575, "acc_norm": 0.2955665024630542, "acc_norm_stderr": 0.032104944337514575 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.19, "acc_stderr": 0.039427724440366234, "acc_norm": 0.19, "acc_norm_stderr": 0.039427724440366234 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.21212121212121213, "acc_stderr": 0.03192271569548299, "acc_norm": 0.21212121212121213, "acc_norm_stderr": 0.03192271569548299 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.2727272727272727, "acc_stderr": 0.03173071239071724, "acc_norm": 0.2727272727272727, "acc_norm_stderr": 0.03173071239071724 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.36787564766839376, "acc_stderr": 0.03480175668466036, "acc_norm": 0.36787564766839376, "acc_norm_stderr": 0.03480175668466036 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.34102564102564104, "acc_stderr": 0.02403548967633506, "acc_norm": 0.34102564102564104, "acc_norm_stderr": 0.02403548967633506 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.26296296296296295, "acc_stderr": 0.026842057873833706, "acc_norm": 0.26296296296296295, "acc_norm_stderr": 0.026842057873833706 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.35294117647058826, "acc_stderr": 0.031041941304059288, "acc_norm": 0.35294117647058826, "acc_norm_stderr": 0.031041941304059288 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31788079470198677, "acc_stderr": 0.038020397601079024, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 
0.038020397601079024 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.23119266055045873, "acc_stderr": 0.01807575024163315, "acc_norm": 0.23119266055045873, "acc_norm_stderr": 0.01807575024163315 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4722222222222222, "acc_stderr": 0.0340470532865388, "acc_norm": 0.4722222222222222, "acc_norm_stderr": 0.0340470532865388 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.25980392156862747, "acc_stderr": 0.03077855467869326, "acc_norm": 0.25980392156862747, "acc_norm_stderr": 0.03077855467869326 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.25738396624472576, "acc_stderr": 0.02845882099146031, "acc_norm": 0.25738396624472576, "acc_norm_stderr": 0.02845882099146031 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.20179372197309417, "acc_stderr": 0.026936111912802273, "acc_norm": 0.20179372197309417, "acc_norm_stderr": 0.026936111912802273 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2366412213740458, "acc_stderr": 0.03727673575596918, "acc_norm": 0.2366412213740458, "acc_norm_stderr": 0.03727673575596918 }, "harness|hendrycksTest-international_law|5": { "acc": 0.38016528925619836, "acc_stderr": 0.04431324501968432, "acc_norm": 0.38016528925619836, "acc_norm_stderr": 0.04431324501968432 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.21296296296296297, "acc_stderr": 0.0395783547198098, "acc_norm": 0.21296296296296297, "acc_norm_stderr": 0.0395783547198098 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22085889570552147, "acc_stderr": 0.032591773927421776, "acc_norm": 0.22085889570552147, "acc_norm_stderr": 0.032591773927421776 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.15178571428571427, "acc_stderr": 0.034057028381856924, "acc_norm": 0.15178571428571427, "acc_norm_stderr": 0.034057028381856924 }, "harness|hendrycksTest-management|5": { "acc": 0.18446601941747573, "acc_stderr": 0.03840423627288276, "acc_norm": 0.18446601941747573, "acc_norm_stderr": 0.03840423627288276 }, "harness|hendrycksTest-marketing|5": { "acc": 0.19658119658119658, "acc_stderr": 0.02603538609895129, "acc_norm": 0.19658119658119658, "acc_norm_stderr": 0.02603538609895129 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.34, "acc_stderr": 0.047609522856952344, "acc_norm": 0.34, "acc_norm_stderr": 0.047609522856952344 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.2515964240102171, "acc_stderr": 0.01551732236552963, "acc_norm": 0.2515964240102171, "acc_norm_stderr": 0.01551732236552963 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.23121387283236994, "acc_stderr": 0.02269865716785571, "acc_norm": 0.23121387283236994, "acc_norm_stderr": 0.02269865716785571 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.014333522059217889, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.014333522059217889 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.27124183006535946, "acc_stderr": 0.02545775669666788, "acc_norm": 0.27124183006535946, "acc_norm_stderr": 0.02545775669666788 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.2379421221864952, "acc_stderr": 0.024185150647818707, "acc_norm": 0.2379421221864952, "acc_norm_stderr": 0.024185150647818707 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.2932098765432099, "acc_stderr": 0.025329888171900926, "acc_norm": 0.2932098765432099, "acc_norm_stderr": 0.025329888171900926 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.2624113475177305, 
"acc_stderr": 0.026244920349843007, "acc_norm": 0.2624113475177305, "acc_norm_stderr": 0.026244920349843007 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.25358539765319427, "acc_stderr": 0.011111715336101132, "acc_norm": 0.25358539765319427, "acc_norm_stderr": 0.011111715336101132 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.4485294117647059, "acc_stderr": 0.030211479609121593, "acc_norm": 0.4485294117647059, "acc_norm_stderr": 0.030211479609121593 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.21895424836601307, "acc_stderr": 0.016729937565537537, "acc_norm": 0.21895424836601307, "acc_norm_stderr": 0.016729937565537537 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.22727272727272727, "acc_stderr": 0.04013964554072774, "acc_norm": 0.22727272727272727, "acc_norm_stderr": 0.04013964554072774 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.24897959183673468, "acc_stderr": 0.027682979522960234, "acc_norm": 0.24897959183673468, "acc_norm_stderr": 0.027682979522960234 }, "harness|hendrycksTest-sociology|5": { "acc": 0.23383084577114427, "acc_stderr": 0.029929415408348398, "acc_norm": 0.23383084577114427, "acc_norm_stderr": 0.029929415408348398 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|hendrycksTest-virology|5": { "acc": 0.20481927710843373, "acc_stderr": 0.03141784291663926, "acc_norm": 0.20481927710843373, "acc_norm_stderr": 0.03141784291663926 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.17543859649122806, "acc_stderr": 0.029170885500727654, "acc_norm": 0.17543859649122806, "acc_norm_stderr": 0.029170885500727654 }, "harness|truthfulqa:mc|0": { "mc1": 0.23990208078335373, "mc1_stderr": 0.014948812679062133, "mc2": 0.42868550699768687, "mc2_stderr": 0.01505826026535896 }, "harness|winogrande|5": { "acc": 0.516179952644041, "acc_stderr": 0.014045126130978601 }, "harness|gsm8k|5": { "acc": 0.002274450341167551, "acc_stderr": 0.0013121578148674316 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
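For the raw per-run metrics quoted in the "Latest results" section above, the JSON file named in that link can also be fetched directly from the dataset repository. This is a hedged sketch: the repository id and file name are copied from the card, but the top-level layout of the JSON is an assumption, so the code only lists its keys rather than indexing into them.

```python
import json

from huggingface_hub import hf_hub_download

# Minimal sketch: download the results file referenced in the "Latest results"
# link of the Facebook/OPT-125M details card.
path = hf_hub_download(
    repo_id="open-llm-leaderboard/details_Facebook__OPT-125M",
    filename="results_2024-01-23T14-31-42.504661.json",
    repo_type="dataset",
)

with open(path) as fp:
    run_results = json.load(fp)

# The per-task blocks shown in the card (e.g. "harness|winogrande|5") should
# appear among, or nested under, these keys.
print(list(run_results.keys()))
```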
open-llm-leaderboard/details_facebook__opt-125m
[ "region:us" ]
2023-08-18T10:10:34+00:00
{"pretty_name": "Evaluation run of Facebook/OPT-125M", "dataset_summary": "Dataset automatically created during the evaluation run of model [Facebook/OPT-125M](https://huggingface.co/Facebook/OPT-125M) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Facebook__OPT-125M\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-23T14:31:42.504661](https://huggingface.co/datasets/open-llm-leaderboard/details_Facebook__OPT-125M/blob/main/results_2024-01-23T14-31-42.504661.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.25971933524807705,\n \"acc_stderr\": 0.030727814194809005,\n \"acc_norm\": 0.26053348115143415,\n \"acc_norm_stderr\": 0.03151920852026647,\n \"mc1\": 0.23990208078335373,\n \"mc1_stderr\": 0.014948812679062133,\n \"mc2\": 0.42868550699768687,\n \"mc2_stderr\": 0.01505826026535896\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.20392491467576793,\n \"acc_stderr\": 0.011774262478702256,\n \"acc_norm\": 0.22866894197952217,\n \"acc_norm_stderr\": 0.012272853582540792\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.2920732921728739,\n \"acc_stderr\": 0.004537865171414025,\n \"acc_norm\": 0.3143796056562438,\n \"acc_norm_stderr\": 0.00463319482579384\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847415,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847415\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.23703703703703705,\n \"acc_stderr\": 0.03673731683969506,\n \"acc_norm\": 0.23703703703703705,\n \"acc_norm_stderr\": 0.03673731683969506\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.2631578947368421,\n \"acc_stderr\": 0.03583496176361062,\n \"acc_norm\": 0.2631578947368421,\n \"acc_norm_stderr\": 0.03583496176361062\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.21509433962264152,\n \"acc_stderr\": 0.025288394502891363,\n \"acc_norm\": 0.21509433962264152,\n \"acc_norm_stderr\": 0.025288394502891363\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2222222222222222,\n \"acc_stderr\": 0.03476590104304134,\n \"acc_norm\": 0.2222222222222222,\n \"acc_norm_stderr\": 0.03476590104304134\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 
0.04512608598542127\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252604,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252604\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768077,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768077\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.2023121387283237,\n \"acc_stderr\": 0.03063114553919882,\n \"acc_norm\": 0.2023121387283237,\n \"acc_norm_stderr\": 0.03063114553919882\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.37254901960784315,\n \"acc_stderr\": 0.04810840148082633,\n \"acc_norm\": 0.37254901960784315,\n \"acc_norm_stderr\": 0.04810840148082633\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536955,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536955\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.3148936170212766,\n \"acc_stderr\": 0.03036358219723816,\n \"acc_norm\": 0.3148936170212766,\n \"acc_norm_stderr\": 0.03036358219723816\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n \"acc_stderr\": 0.039994238792813344,\n \"acc_norm\": 0.23684210526315788,\n \"acc_norm_stderr\": 0.039994238792813344\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2482758620689655,\n \"acc_stderr\": 0.0360010569272777,\n \"acc_norm\": 0.2482758620689655,\n \"acc_norm_stderr\": 0.0360010569272777\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2566137566137566,\n \"acc_stderr\": 0.022494510767503154,\n \"acc_norm\": 0.2566137566137566,\n \"acc_norm_stderr\": 0.022494510767503154\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.14285714285714285,\n \"acc_stderr\": 0.03129843185743809,\n \"acc_norm\": 0.14285714285714285,\n \"acc_norm_stderr\": 0.03129843185743809\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536934,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536934\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.3161290322580645,\n \"acc_stderr\": 0.02645087448904277,\n \"acc_norm\": 0.3161290322580645,\n \"acc_norm_stderr\": 0.02645087448904277\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.2955665024630542,\n \"acc_stderr\": 0.032104944337514575,\n \"acc_norm\": 0.2955665024630542,\n \"acc_norm_stderr\": 0.032104944337514575\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.19,\n \"acc_stderr\": 0.039427724440366234,\n \"acc_norm\": 0.19,\n \"acc_norm_stderr\": 0.039427724440366234\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.21212121212121213,\n \"acc_stderr\": 0.03192271569548299,\n \"acc_norm\": 0.21212121212121213,\n \"acc_norm_stderr\": 0.03192271569548299\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.2727272727272727,\n \"acc_stderr\": 0.03173071239071724,\n \"acc_norm\": 0.2727272727272727,\n \"acc_norm_stderr\": 0.03173071239071724\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.36787564766839376,\n \"acc_stderr\": 0.03480175668466036,\n \"acc_norm\": 0.36787564766839376,\n \"acc_norm_stderr\": 0.03480175668466036\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.34102564102564104,\n \"acc_stderr\": 
0.02403548967633506,\n \"acc_norm\": 0.34102564102564104,\n \"acc_norm_stderr\": 0.02403548967633506\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.26296296296296295,\n \"acc_stderr\": 0.026842057873833706,\n \"acc_norm\": 0.26296296296296295,\n \"acc_norm_stderr\": 0.026842057873833706\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.35294117647058826,\n \"acc_stderr\": 0.031041941304059288,\n \"acc_norm\": 0.35294117647058826,\n \"acc_norm_stderr\": 0.031041941304059288\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31788079470198677,\n \"acc_stderr\": 0.038020397601079024,\n \"acc_norm\": 0.31788079470198677,\n \"acc_norm_stderr\": 0.038020397601079024\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.23119266055045873,\n \"acc_stderr\": 0.01807575024163315,\n \"acc_norm\": 0.23119266055045873,\n \"acc_norm_stderr\": 0.01807575024163315\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4722222222222222,\n \"acc_stderr\": 0.0340470532865388,\n \"acc_norm\": 0.4722222222222222,\n \"acc_norm_stderr\": 0.0340470532865388\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.25980392156862747,\n \"acc_stderr\": 0.03077855467869326,\n \"acc_norm\": 0.25980392156862747,\n \"acc_norm_stderr\": 0.03077855467869326\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.25738396624472576,\n \"acc_stderr\": 0.02845882099146031,\n \"acc_norm\": 0.25738396624472576,\n \"acc_norm_stderr\": 0.02845882099146031\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.20179372197309417,\n \"acc_stderr\": 0.026936111912802273,\n \"acc_norm\": 0.20179372197309417,\n \"acc_norm_stderr\": 0.026936111912802273\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.2366412213740458,\n \"acc_stderr\": 0.03727673575596918,\n \"acc_norm\": 0.2366412213740458,\n \"acc_norm_stderr\": 0.03727673575596918\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.38016528925619836,\n \"acc_stderr\": 0.04431324501968432,\n \"acc_norm\": 0.38016528925619836,\n \"acc_norm_stderr\": 0.04431324501968432\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.21296296296296297,\n \"acc_stderr\": 0.0395783547198098,\n \"acc_norm\": 0.21296296296296297,\n \"acc_norm_stderr\": 0.0395783547198098\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.22085889570552147,\n \"acc_stderr\": 0.032591773927421776,\n \"acc_norm\": 0.22085889570552147,\n \"acc_norm_stderr\": 0.032591773927421776\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.15178571428571427,\n \"acc_stderr\": 0.034057028381856924,\n \"acc_norm\": 0.15178571428571427,\n \"acc_norm_stderr\": 0.034057028381856924\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.18446601941747573,\n \"acc_stderr\": 0.03840423627288276,\n \"acc_norm\": 0.18446601941747573,\n \"acc_norm_stderr\": 0.03840423627288276\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.19658119658119658,\n \"acc_stderr\": 0.02603538609895129,\n \"acc_norm\": 0.19658119658119658,\n \"acc_norm_stderr\": 0.02603538609895129\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.047609522856952344,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.047609522856952344\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.2515964240102171,\n \"acc_stderr\": 0.01551732236552963,\n \"acc_norm\": 
0.2515964240102171,\n \"acc_norm_stderr\": 0.01551732236552963\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.23121387283236994,\n \"acc_stderr\": 0.02269865716785571,\n \"acc_norm\": 0.23121387283236994,\n \"acc_norm_stderr\": 0.02269865716785571\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2424581005586592,\n \"acc_stderr\": 0.014333522059217889,\n \"acc_norm\": 0.2424581005586592,\n \"acc_norm_stderr\": 0.014333522059217889\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.27124183006535946,\n \"acc_stderr\": 0.02545775669666788,\n \"acc_norm\": 0.27124183006535946,\n \"acc_norm_stderr\": 0.02545775669666788\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.2379421221864952,\n \"acc_stderr\": 0.024185150647818707,\n \"acc_norm\": 0.2379421221864952,\n \"acc_norm_stderr\": 0.024185150647818707\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.2932098765432099,\n \"acc_stderr\": 0.025329888171900926,\n \"acc_norm\": 0.2932098765432099,\n \"acc_norm_stderr\": 0.025329888171900926\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.2624113475177305,\n \"acc_stderr\": 0.026244920349843007,\n \"acc_norm\": 0.2624113475177305,\n \"acc_norm_stderr\": 0.026244920349843007\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.25358539765319427,\n \"acc_stderr\": 0.011111715336101132,\n \"acc_norm\": 0.25358539765319427,\n \"acc_norm_stderr\": 0.011111715336101132\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.4485294117647059,\n \"acc_stderr\": 0.030211479609121593,\n \"acc_norm\": 0.4485294117647059,\n \"acc_norm_stderr\": 0.030211479609121593\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.21895424836601307,\n \"acc_stderr\": 0.016729937565537537,\n \"acc_norm\": 0.21895424836601307,\n \"acc_norm_stderr\": 0.016729937565537537\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.22727272727272727,\n \"acc_stderr\": 0.04013964554072774,\n \"acc_norm\": 0.22727272727272727,\n \"acc_norm_stderr\": 0.04013964554072774\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.24897959183673468,\n \"acc_stderr\": 0.027682979522960234,\n \"acc_norm\": 0.24897959183673468,\n \"acc_norm_stderr\": 0.027682979522960234\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.23383084577114427,\n \"acc_stderr\": 0.029929415408348398,\n \"acc_norm\": 0.23383084577114427,\n \"acc_norm_stderr\": 0.029929415408348398\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.20481927710843373,\n \"acc_stderr\": 0.03141784291663926,\n \"acc_norm\": 0.20481927710843373,\n \"acc_norm_stderr\": 0.03141784291663926\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.17543859649122806,\n \"acc_stderr\": 0.029170885500727654,\n \"acc_norm\": 0.17543859649122806,\n \"acc_norm_stderr\": 0.029170885500727654\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.23990208078335373,\n \"mc1_stderr\": 0.014948812679062133,\n \"mc2\": 0.42868550699768687,\n \"mc2_stderr\": 0.01505826026535896\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.516179952644041,\n \"acc_stderr\": 0.014045126130978601\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.002274450341167551,\n \"acc_stderr\": 0.0013121578148674316\n }\n}\n```", "repo_url": 
"https://huggingface.co/Facebook/OPT-125M", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|arc:challenge|25_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|arc:challenge|25_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_19T00_45_29.121149", "path": ["**/details_harness|drop|3_2023-10-19T00-45-29.121149.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-19T00-45-29.121149.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_19T00_45_29.121149", "path": ["**/details_harness|gsm8k|5_2023-10-19T00-45-29.121149.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|gsm8k|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hellaswag|10_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hellaswag|10_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T14:00:10.742260.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T14:00:10.742260.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T14-31-42.504661.parquet", 
"**/details_harness|hendrycksTest-anatomy|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T14-31-42.504661.parquet", 
"**/details_harness|hendrycksTest-marketing|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T14-31-42.504661.parquet", 
"**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T14-31-42.504661.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", 
"path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": 
"2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": 
["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T14-31-42.504661.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_19T00_45_29.121149", "path": ["**/details_harness|winogrande|5_2023-10-19T00-45-29.121149.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["**/details_harness|winogrande|5_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-23T14-31-42.504661.parquet"]}]}, 
{"config_name": "results", "data_files": [{"split": "2023_07_19T14_00_10.742260", "path": ["results_2023-07-19T14:00:10.742260.parquet"]}, {"split": "2023_10_19T00_45_29.121149", "path": ["results_2023-10-19T00-45-29.121149.parquet"]}, {"split": "2024_01_23T14_31_42.504661", "path": ["results_2024-01-23T14-31-42.504661.parquet"]}, {"split": "latest", "path": ["results_2024-01-23T14-31-42.504661.parquet"]}]}]}
2024-01-23T14:33:47+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Facebook/OPT-125M Dataset automatically created during the evaluation run of model Facebook/OPT-125M on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-23T14:31:42.504661 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
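The card text above says "you can for instance do the following:" but its code block was stripped in this flattened rendering. A minimal sketch of the intended call, assuming the details repository follows the leaderboard's usual `details_<org>__<model>` naming (the exact repo id is not stated in this entry) and using the `harness_winogrande_5` config and `latest` split that appear in this entry's metadata:

```python
from datasets import load_dataset

# Repo id below is an assumption inferred from the leaderboard's naming pattern;
# "harness_winogrande_5" and the "latest" split are listed in this entry's metadata.
data = load_dataset(
    "open-llm-leaderboard/details_Facebook__OPT-125M",
    "harness_winogrande_5",
    split="latest",
)
```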
[ "# Dataset Card for Evaluation run of Facebook/OPT-125M\n\n\n\nDataset automatically created during the evaluation run of model Facebook/OPT-125M on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T14:31:42.504661(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Facebook/OPT-125M\n\n\n\nDataset automatically created during the evaluation run of model Facebook/OPT-125M on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-23T14:31:42.504661(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 175, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Facebook/OPT-125M\n\n\n\nDataset automatically created during the evaluation run of model Facebook/OPT-125M on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-23T14:31:42.504661(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
22b832c1ccced791f7db4b2f4343e26aa4cfd747
# Dataset Card for Evaluation run of facebook/opt-13b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/facebook/opt-13b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [facebook/opt-13b](https://huggingface.co/facebook/opt-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_facebook__opt-13b", "harness_gsm8k_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-02T16:45:23.193106](https://huggingface.co/datasets/open-llm-leaderboard/details_facebook__opt-13b/blob/main/results_2023-12-02T16-45-23.193106.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.017437452615617893, "acc_stderr": 0.0036054868679982863 }, "harness|gsm8k|5": { "acc": 0.017437452615617893, "acc_stderr": 0.0036054868679982863 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_facebook__opt-13b
[ "region:us" ]
2023-08-18T10:10:42+00:00
{"pretty_name": "Evaluation run of facebook/opt-13b", "dataset_summary": "Dataset automatically created during the evaluation run of model [facebook/opt-13b](https://huggingface.co/facebook/opt-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_facebook__opt-13b\",\n\t\"harness_gsm8k_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-02T16:45:23.193106](https://huggingface.co/datasets/open-llm-leaderboard/details_facebook__opt-13b/blob/main/results_2023-12-02T16-45-23.193106.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.017437452615617893,\n \"acc_stderr\": 0.0036054868679982863\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.017437452615617893,\n \"acc_stderr\": 0.0036054868679982863\n }\n}\n```", "repo_url": "https://huggingface.co/facebook/opt-13b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|arc:challenge|25_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_09T11_29_00.543425", "path": ["**/details_harness|drop|3_2023-09-09T11-29-00.543425.parquet"]}, {"split": "2023_10_13T09_11_13.715169", "path": ["**/details_harness|drop|3_2023-10-13T09-11-13.715169.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-13T09-11-13.715169.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_09T11_29_00.543425", "path": ["**/details_harness|gsm8k|5_2023-09-09T11-29-00.543425.parquet"]}, {"split": "2023_10_13T09_11_13.715169", "path": ["**/details_harness|gsm8k|5_2023-10-13T09-11-13.715169.parquet"]}, {"split": "2023_12_02T16_45_23.193106", "path": ["**/details_harness|gsm8k|5_2023-12-02T16-45-23.193106.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-02T16-45-23.193106.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hellaswag|10_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:53:15.812068.parquet", 
"**/details_harness|hendrycksTest-management|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:53:15.812068.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T18:53:15.812068.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:53:15.812068.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": 
"2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T18:53:15.812068.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T18:53:15.812068.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_09T11_29_00.543425", "path": ["**/details_harness|winogrande|5_2023-09-09T11-29-00.543425.parquet"]}, {"split": "2023_10_13T09_11_13.715169", "path": ["**/details_harness|winogrande|5_2023-10-13T09-11-13.715169.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-13T09-11-13.715169.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T18_53_15.812068", "path": ["results_2023-07-19T18:53:15.812068.parquet"]}, {"split": "2023_09_09T11_29_00.543425", "path": ["results_2023-09-09T11-29-00.543425.parquet"]}, {"split": "2023_10_13T09_11_13.715169", "path": ["results_2023-10-13T09-11-13.715169.parquet"]}, {"split": "2023_12_02T16_45_23.193106", "path": ["results_2023-12-02T16-45-23.193106.parquet"]}, {"split": "latest", "path": ["results_2023-12-02T16-45-23.193106.parquet"]}]}]}
2023-12-02T16:45:31+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of facebook/opt-13b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model facebook/opt-13b on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-02T16:45:23.193106 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
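The code block stripped after "do the following:" in this rendering corresponds to the example given in this record's full card text above; it is repeated here for readability, with the repo and config names taken from that example:

```python
from datasets import load_dataset

# Same call as shown in the full card text for facebook/opt-13b
data = load_dataset(
    "open-llm-leaderboard/details_facebook__opt-13b",
    "harness_gsm8k_5",
    split="train",
)
```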
[ "# Dataset Card for Evaluation run of facebook/opt-13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/opt-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-02T16:45:23.193106(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of facebook/opt-13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/opt-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-02T16:45:23.193106(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 15, 31, 164, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of facebook/opt-13b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/opt-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-02T16:45:23.193106(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
6762de89a77c884afccb86a90d3cf6c6fcf34d8e
# Dataset Card for Evaluation run of facebook/opt-iml-max-30b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/facebook/opt-iml-max-30b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [facebook/opt-iml-max-30b](https://huggingface.co/facebook/opt-iml-max-30b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_facebook__opt-iml-max-30b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-16T22:55:14.395125](https://huggingface.co/datasets/open-llm-leaderboard/details_facebook__opt-iml-max-30b/blob/main/results_2023-10-16T22-55-14.395125.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.40740352348993286, "em_stderr": 0.005031895046041087, "f1": 0.4369809144295305, "f1_stderr": 0.0049315636477604, "acc": 0.3810966907675028, "acc_stderr": 0.008336483786913143 }, "harness|drop|3": { "em": 0.40740352348993286, "em_stderr": 0.005031895046041087, "f1": 0.4369809144295305, "f1_stderr": 0.0049315636477604 }, "harness|gsm8k|5": { "acc": 0.025018953752843062, "acc_stderr": 0.004302045046564278 }, "harness|winogrande|5": { "acc": 0.7371744277821626, "acc_stderr": 0.012370922527262008 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_facebook__opt-iml-max-30b
[ "region:us" ]
2023-08-18T10:10:51+00:00
{"pretty_name": "Evaluation run of facebook/opt-iml-max-30b", "dataset_summary": "Dataset automatically created during the evaluation run of model [facebook/opt-iml-max-30b](https://huggingface.co/facebook/opt-iml-max-30b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_facebook__opt-iml-max-30b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-16T22:55:14.395125](https://huggingface.co/datasets/open-llm-leaderboard/details_facebook__opt-iml-max-30b/blob/main/results_2023-10-16T22-55-14.395125.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.40740352348993286,\n \"em_stderr\": 0.005031895046041087,\n \"f1\": 0.4369809144295305,\n \"f1_stderr\": 0.0049315636477604,\n \"acc\": 0.3810966907675028,\n \"acc_stderr\": 0.008336483786913143\n },\n \"harness|drop|3\": {\n \"em\": 0.40740352348993286,\n \"em_stderr\": 0.005031895046041087,\n \"f1\": 0.4369809144295305,\n \"f1_stderr\": 0.0049315636477604\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.025018953752843062,\n \"acc_stderr\": 0.004302045046564278\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7371744277821626,\n \"acc_stderr\": 0.012370922527262008\n }\n}\n```", "repo_url": "https://huggingface.co/facebook/opt-iml-max-30b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|arc:challenge|25_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_16T22_55_14.395125", "path": ["**/details_harness|drop|3_2023-10-16T22-55-14.395125.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-16T22-55-14.395125.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_16T22_55_14.395125", "path": ["**/details_harness|gsm8k|5_2023-10-16T22-55-14.395125.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-16T22-55-14.395125.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hellaswag|10_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": 
"2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T13:25:50.575832.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T13:25:50.575832.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T13:25:50.575832.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T13:25:50.575832.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T13:25:50.575832.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_16T22_55_14.395125", "path": ["**/details_harness|winogrande|5_2023-10-16T22-55-14.395125.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-16T22-55-14.395125.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_17T13_25_50.575832", "path": ["results_2023-08-17T13:25:50.575832.parquet"]}, {"split": "2023_10_16T22_55_14.395125", "path": ["results_2023-10-16T22-55-14.395125.parquet"]}, {"split": "latest", "path": ["results_2023-10-16T22-55-14.395125.parquet"]}]}]}
2023-10-16T21:55:25+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of facebook/opt-iml-max-30b

## Dataset Description

- Homepage: 
- Repository: URL
- Paper: 
- Leaderboard: URL
- Point of Contact: clementine@URL

### Dataset Summary

Dataset automatically created during the evaluation run of model facebook/opt-iml-max-30b on the Open LLM Leaderboard.

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following:

## Latest results

These are the latest results from run 2023-10-16T22:55:14.395125 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

### Supported Tasks and Leaderboards

### Languages

## Dataset Structure

### Data Instances

### Data Fields

### Data Splits

## Dataset Creation

### Curation Rationale

### Source Data

#### Initial Data Collection and Normalization

#### Who are the source language producers?

### Annotations

#### Annotation process

#### Who are the annotators?

### Personal and Sensitive Information

## Considerations for Using the Data

### Social Impact of Dataset

### Discussion of Biases

### Other Known Limitations

## Additional Information

### Dataset Curators

### Licensing Information

### Contributions
[ "# Dataset Card for Evaluation run of facebook/opt-iml-max-30b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/opt-iml-max-30b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-16T22:55:14.395125(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of facebook/opt-iml-max-30b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/opt-iml-max-30b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-16T22:55:14.395125(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 20, 31, 168, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of facebook/opt-iml-max-30b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/opt-iml-max-30b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-16T22:55:14.395125(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
7208698a93ebf92609a433998951ccef74442758
# Dataset Card for Evaluation run of mrm8488/llama-2-coder-7b

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/mrm8488/llama-2-coder-7b
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [mrm8488/llama-2-coder-7b](https://huggingface.co/mrm8488/llama-2-coder-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_mrm8488__llama-2-coder-7b",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-09-17T16:01:46.788467](https://huggingface.co/datasets/open-llm-leaderboard/details_mrm8488__llama-2-coder-7b/blob/main/results_2023-09-17T16-01-46.788467.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.0012583892617449664,
        "em_stderr": 0.0003630560893118953,
        "f1": 0.059393875838926136,
        "f1_stderr": 0.0013484598070009264,
        "acc": 0.4129021950450372,
        "acc_stderr": 0.009590867532569068
    },
    "harness|drop|3": {
        "em": 0.0012583892617449664,
        "em_stderr": 0.0003630560893118953,
        "f1": 0.059393875838926136,
        "f1_stderr": 0.0013484598070009264
    },
    "harness|gsm8k|5": {
        "acc": 0.0712661106899166,
        "acc_stderr": 0.007086462127954491
    },
    "harness|winogrande|5": {
        "acc": 0.7545382794001578,
        "acc_stderr": 0.012095272937183644
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
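The per-task detail configs for this repository can be loaded in the same way. Below is a minimal sketch using the "harness_drop_3" config and the "latest" split named in the config listing below; the exact per-example fields are whatever the harness recorded and are not guaranteed here.

```python
from datasets import load_dataset

# Sketch: per-example DROP details for mrm8488/llama-2-coder-7b.
# "harness_drop_3" and the "latest" split are declared in this repository's
# config listing; other tasks follow the same harness_<task>_<n-shot> pattern.
drop_details = load_dataset("open-llm-leaderboard/details_mrm8488__llama-2-coder-7b",
                            "harness_drop_3",
                            split="latest")
print(len(drop_details), drop_details.column_names)  # evaluated examples and recorded fields
```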
open-llm-leaderboard/details_mrm8488__llama-2-coder-7b
[ "region:us" ]
2023-08-18T10:10:59+00:00
{"pretty_name": "Evaluation run of mrm8488/llama-2-coder-7b", "dataset_summary": "Dataset automatically created during the evaluation run of model [mrm8488/llama-2-coder-7b](https://huggingface.co/mrm8488/llama-2-coder-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_mrm8488__llama-2-coder-7b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-09-17T16:01:46.788467](https://huggingface.co/datasets/open-llm-leaderboard/details_mrm8488__llama-2-coder-7b/blob/main/results_2023-09-17T16-01-46.788467.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0012583892617449664,\n \"em_stderr\": 0.0003630560893118953,\n \"f1\": 0.059393875838926136,\n \"f1_stderr\": 0.0013484598070009264,\n \"acc\": 0.4129021950450372,\n \"acc_stderr\": 0.009590867532569068\n },\n \"harness|drop|3\": {\n \"em\": 0.0012583892617449664,\n \"em_stderr\": 0.0003630560893118953,\n \"f1\": 0.059393875838926136,\n \"f1_stderr\": 0.0013484598070009264\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0712661106899166,\n \"acc_stderr\": 0.007086462127954491\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7545382794001578,\n \"acc_stderr\": 0.012095272937183644\n }\n}\n```", "repo_url": "https://huggingface.co/mrm8488/llama-2-coder-7b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|arc:challenge|25_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_17T16_01_46.788467", "path": ["**/details_harness|drop|3_2023-09-17T16-01-46.788467.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-17T16-01-46.788467.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_17T16_01_46.788467", "path": ["**/details_harness|gsm8k|5_2023-09-17T16-01-46.788467.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-09-17T16-01-46.788467.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hellaswag|10_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": 
[{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T22:53:49.395953.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T22:53:49.395953.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-09T22:53:49.395953.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-09T22:53:49.395953.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-09T22:53:49.395953.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_17T16_01_46.788467", "path": ["**/details_harness|winogrande|5_2023-09-17T16-01-46.788467.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-17T16-01-46.788467.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_09T22_53_49.395953", "path": ["results_2023-08-09T22:53:49.395953.parquet"]}, {"split": "2023_09_17T16_01_46.788467", "path": ["results_2023-09-17T16-01-46.788467.parquet"]}, {"split": "latest", "path": ["results_2023-09-17T16-01-46.788467.parquet"]}]}]}
2023-09-17T15:01:59+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of mrm8488/llama-2-coder-7b

## Dataset Description

- Homepage:
- Repository: URL
- Paper:
- Leaderboard: URL
- Point of Contact: clementine@URL

### Dataset Summary

Dataset automatically created during the evaluation run of model mrm8488/llama-2-coder-7b on the Open LLM Leaderboard.

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following (the code snippet was stripped from this processed text; see the sketch after this card):

## Latest results

These are the latest results from run 2023-09-17T16:01:46.788467 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval):

### Supported Tasks and Leaderboards

### Languages

## Dataset Structure

### Data Instances

### Data Fields

### Data Splits

## Dataset Creation

### Curation Rationale

### Source Data

#### Initial Data Collection and Normalization

#### Who are the source language producers?

### Annotations

#### Annotation process

#### Who are the annotators?

### Personal and Sensitive Information

## Considerations for Using the Data

### Social Impact of Dataset

### Discussion of Biases

### Other Known Limitations

## Additional Information

### Dataset Curators

### Licensing Information

### Contributions
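The card text above ends its loading instructions without the accompanying code, which was dropped during text extraction. A minimal sketch of what that call presumably looks like: the repository name `open-llm-leaderboard/details_mrm8488__llama-2-coder-7b` is an assumption based on the leaderboard's usual `details_<org>__<model>` naming, while the `harness_winogrande_5` configuration is taken from the metadata above.

```python
from datasets import load_dataset

# Assumed repository name, following the leaderboard's details_<org>__<model> convention;
# "harness_winogrande_5" is one of the per-task configurations listed in the metadata.
data = load_dataset(
    "open-llm-leaderboard/details_mrm8488__llama-2-coder-7b",
    "harness_winogrande_5",
    split="train",  # per the card, "train" always points to the latest results
)
print(data)
```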
[ "# Dataset Card for Evaluation run of mrm8488/llama-2-coder-7b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model mrm8488/llama-2-coder-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-17T16:01:46.788467(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of mrm8488/llama-2-coder-7b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model mrm8488/llama-2-coder-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-17T16:01:46.788467(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 21, 31, 169, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of mrm8488/llama-2-coder-7b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model mrm8488/llama-2-coder-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-09-17T16:01:46.788467(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
70a591701dded90eaeea0d051d3d6435c6741c21
# Dataset Card for Evaluation run of Yhyu13/oasst-rlhf-2-llama-30b-7k-steps-hf

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/Yhyu13/oasst-rlhf-2-llama-30b-7k-steps-hf
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [Yhyu13/oasst-rlhf-2-llama-30b-7k-steps-hf](https://huggingface.co/Yhyu13/oasst-rlhf-2-llama-30b-7k-steps-hf) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_Yhyu13__oasst-rlhf-2-llama-30b-7k-steps-hf",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-09-18T02:17:36.805434](https://huggingface.co/datasets/open-llm-leaderboard/details_Yhyu13__oasst-rlhf-2-llama-30b-7k-steps-hf/blob/main/results_2023-09-18T02-17-36.805434.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.002307046979865772,
        "em_stderr": 0.0004913221265094571,
        "f1": 0.07781564597315446,
        "f1_stderr": 0.0016061766920796063,
        "acc": 0.5511598739328604,
        "acc_stderr": 0.012142210957292902
    },
    "harness|drop|3": {
        "em": 0.002307046979865772,
        "em_stderr": 0.0004913221265094571,
        "f1": 0.07781564597315446,
        "f1_stderr": 0.0016061766920796063
    },
    "harness|gsm8k|5": {
        "acc": 0.3146322971948446,
        "acc_stderr": 0.012791037227336032
    },
    "harness|winogrande|5": {
        "acc": 0.7876874506708761,
        "acc_stderr": 0.011493384687249773
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
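Besides the per-task load shown in the card, the "results" configuration mentioned above can be loaded the same way. A small sketch, assuming the "latest" split declared for that configuration in this card's config list:

```python
from datasets import load_dataset

# Aggregated scores for the run; "latest" is the split declared for the
# "results" configuration in this card's metadata.
results = load_dataset(
    "open-llm-leaderboard/details_Yhyu13__oasst-rlhf-2-llama-30b-7k-steps-hf",
    "results",
    split="latest",
)
print(results[0])
```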
open-llm-leaderboard/details_Yhyu13__oasst-rlhf-2-llama-30b-7k-steps-hf
[ "region:us" ]
2023-08-18T10:11:08+00:00
{"pretty_name": "Evaluation run of Yhyu13/oasst-rlhf-2-llama-30b-7k-steps-hf", "dataset_summary": "Dataset automatically created during the evaluation run of model [Yhyu13/oasst-rlhf-2-llama-30b-7k-steps-hf](https://huggingface.co/Yhyu13/oasst-rlhf-2-llama-30b-7k-steps-hf) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Yhyu13__oasst-rlhf-2-llama-30b-7k-steps-hf\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-09-18T02:17:36.805434](https://huggingface.co/datasets/open-llm-leaderboard/details_Yhyu13__oasst-rlhf-2-llama-30b-7k-steps-hf/blob/main/results_2023-09-18T02-17-36.805434.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.002307046979865772,\n \"em_stderr\": 0.0004913221265094571,\n \"f1\": 0.07781564597315446,\n \"f1_stderr\": 0.0016061766920796063,\n \"acc\": 0.5511598739328604,\n \"acc_stderr\": 0.012142210957292902\n },\n \"harness|drop|3\": {\n \"em\": 0.002307046979865772,\n \"em_stderr\": 0.0004913221265094571,\n \"f1\": 0.07781564597315446,\n \"f1_stderr\": 0.0016061766920796063\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.3146322971948446,\n \"acc_stderr\": 0.012791037227336032\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7876874506708761,\n \"acc_stderr\": 0.011493384687249773\n }\n}\n```", "repo_url": "https://huggingface.co/Yhyu13/oasst-rlhf-2-llama-30b-7k-steps-hf", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|arc:challenge|25_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_18T02_17_36.805434", "path": ["**/details_harness|drop|3_2023-09-18T02-17-36.805434.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-18T02-17-36.805434.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_18T02_17_36.805434", "path": ["**/details_harness|gsm8k|5_2023-09-18T02-17-36.805434.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-09-18T02-17-36.805434.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hellaswag|10_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hellaswag|10_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T22:42:38.656530.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T22:42:38.656530.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T22:42:38.656530.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T22:42:38.656530.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": 
"2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T22:42:38.656530.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T22:42:38.656530.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_18T02_17_36.805434", "path": ["**/details_harness|winogrande|5_2023-09-18T02-17-36.805434.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-18T02-17-36.805434.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T22_42_38.656530", "path": ["results_2023-07-19T22:42:38.656530.parquet"]}, {"split": "2023_09_18T02_17_36.805434", "path": ["results_2023-09-18T02-17-36.805434.parquet"]}, {"split": "latest", "path": ["results_2023-09-18T02-17-36.805434.parquet"]}]}]}
2023-09-18T01:17:48+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Yhyu13/oasst-rlhf-2-llama-30b-7k-steps-hf ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model Yhyu13/oasst-rlhf-2-llama-30b-7k-steps-hf on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-09-18T02:17:36.805434 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
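The flattened card above omits the loading snippet that these auto-generated evaluation cards normally carry. A minimal sketch of what it would look like for this run, assuming the repository follows the leaderboard's usual `open-llm-leaderboard/details_<org>__<model>` naming (the exact repository id is not shown in this excerpt) and using the `harness_winogrande_5` configuration and `latest` split listed in the metadata above:

```python
from datasets import load_dataset

# Assumed repository id, following the leaderboard's usual naming pattern for
# Yhyu13/oasst-rlhf-2-llama-30b-7k-steps-hf; the exact id is not shown here.
data = load_dataset(
    "open-llm-leaderboard/details_Yhyu13__oasst-rlhf-2-llama-30b-7k-steps-hf",
    "harness_winogrande_5",  # one of the configurations listed in the metadata above
    split="latest",          # or the timestamped split "2023_09_18T02_17_36.805434"
)
```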
[ "# Dataset Card for Evaluation run of Yhyu13/oasst-rlhf-2-llama-30b-7k-steps-hf", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Yhyu13/oasst-rlhf-2-llama-30b-7k-steps-hf on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-18T02:17:36.805434(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Yhyu13/oasst-rlhf-2-llama-30b-7k-steps-hf", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Yhyu13/oasst-rlhf-2-llama-30b-7k-steps-hf on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-18T02:17:36.805434(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 35, 31, 183, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Yhyu13/oasst-rlhf-2-llama-30b-7k-steps-hf## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model Yhyu13/oasst-rlhf-2-llama-30b-7k-steps-hf on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-09-18T02:17:36.805434(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
89df32455435e8ddd486c17cadcff6189b131003
# Dataset Card for Evaluation run of KnutJaegersberg/galactica-orca-wizardlm-1.3b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/KnutJaegersberg/galactica-orca-wizardlm-1.3b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [KnutJaegersberg/galactica-orca-wizardlm-1.3b](https://huggingface.co/KnutJaegersberg/galactica-orca-wizardlm-1.3b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_KnutJaegersberg__galactica-orca-wizardlm-1.3b", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-07-27T10:33:13.422388](https://huggingface.co/datasets/open-llm-leaderboard/details_KnutJaegersberg__galactica-orca-wizardlm-1.3b/blob/main/results_2023-07-27T10%3A33%3A13.422388.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.2607167828475717, "acc_stderr": 0.031890976655530354, "acc_norm": 0.2619942427603414, "acc_norm_stderr": 0.03190140774792886, "mc1": 0.25458996328029376, "mc1_stderr": 0.015250117079156494, "mc2": 0.41268090479367026, "mc2_stderr": 0.015430187488430076 }, "harness|arc:challenge|25": { "acc": 0.27303754266211605, "acc_stderr": 0.013019332762635727, "acc_norm": 0.30887372013651876, "acc_norm_stderr": 0.013501770929344004 }, "harness|hellaswag|10": { "acc": 0.32065325632344155, "acc_stderr": 0.004657738398900916, "acc_norm": 0.3601872137024497, "acc_norm_stderr": 0.004790734683704583 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.2814814814814815, "acc_stderr": 0.038850042458002526, "acc_norm": 0.2814814814814815, "acc_norm_stderr": 0.038850042458002526 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.24342105263157895, "acc_stderr": 0.034923496688842384, "acc_norm": 0.24342105263157895, "acc_norm_stderr": 0.034923496688842384 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.26037735849056604, "acc_stderr": 0.027008766090708104, "acc_norm": 0.26037735849056604, "acc_norm_stderr": 0.027008766090708104 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.3333333333333333, "acc_stderr": 0.03942082639927213, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.03942082639927213 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.17, "acc_stderr": 0.03775251680686371, "acc_norm": 0.17, "acc_norm_stderr": 0.03775251680686371 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.19, "acc_stderr": 0.03942772444036623, "acc_norm": 0.19, "acc_norm_stderr": 0.03942772444036623 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.26, "acc_stderr": 0.04408440022768079, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768079 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.2774566473988439, "acc_stderr": 0.03414014007044036, "acc_norm": 0.2774566473988439, "acc_norm_stderr": 0.03414014007044036 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237655, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237655 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.26382978723404255, "acc_stderr": 0.028809989854102963, "acc_norm": 0.26382978723404255, "acc_norm_stderr": 0.028809989854102963 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2543859649122807, "acc_stderr": 0.040969851398436716, "acc_norm": 0.2543859649122807, "acc_norm_stderr": 0.040969851398436716 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.3310344827586207, "acc_stderr": 0.03921545312467122, "acc_norm": 0.3310344827586207, "acc_norm_stderr": 0.03921545312467122 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2830687830687831, "acc_stderr": 0.023201392938194974, "acc_norm": 0.2830687830687831, "acc_norm_stderr": 0.023201392938194974 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.19047619047619047, "acc_stderr": 
0.035122074123020534, "acc_norm": 0.19047619047619047, "acc_norm_stderr": 0.035122074123020534 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.2161290322580645, "acc_stderr": 0.02341529343356852, "acc_norm": 0.2161290322580645, "acc_norm_stderr": 0.02341529343356852 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.23645320197044334, "acc_stderr": 0.029896114291733552, "acc_norm": 0.23645320197044334, "acc_norm_stderr": 0.029896114291733552 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.23030303030303031, "acc_stderr": 0.03287666758603489, "acc_norm": 0.23030303030303031, "acc_norm_stderr": 0.03287666758603489 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.20707070707070707, "acc_stderr": 0.028869778460267063, "acc_norm": 0.20707070707070707, "acc_norm_stderr": 0.028869778460267063 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.22279792746113988, "acc_stderr": 0.03003114797764154, "acc_norm": 0.22279792746113988, "acc_norm_stderr": 0.03003114797764154 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.31794871794871793, "acc_stderr": 0.023610884308927865, "acc_norm": 0.31794871794871793, "acc_norm_stderr": 0.023610884308927865 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2777777777777778, "acc_stderr": 0.027309140588230182, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.027309140588230182 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.23529411764705882, "acc_stderr": 0.027553614467863804, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.027553614467863804 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.24503311258278146, "acc_stderr": 0.03511807571804724, "acc_norm": 0.24503311258278146, "acc_norm_stderr": 0.03511807571804724 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.25137614678899084, "acc_stderr": 0.018599206360287415, "acc_norm": 0.25137614678899084, "acc_norm_stderr": 0.018599206360287415 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.30092592592592593, "acc_stderr": 0.03128039084329883, "acc_norm": 0.30092592592592593, "acc_norm_stderr": 0.03128039084329883 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.2549019607843137, "acc_stderr": 0.030587591351604246, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.030587591351604246 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.270042194092827, "acc_stderr": 0.028900721906293426, "acc_norm": 0.270042194092827, "acc_norm_stderr": 0.028900721906293426 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.32286995515695066, "acc_stderr": 0.03138147637575498, "acc_norm": 0.32286995515695066, "acc_norm_stderr": 0.03138147637575498 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.183206106870229, "acc_stderr": 0.033927709264947335, "acc_norm": 0.183206106870229, "acc_norm_stderr": 0.033927709264947335 }, "harness|hendrycksTest-international_law|5": { "acc": 0.24793388429752067, "acc_stderr": 0.03941897526516301, "acc_norm": 0.24793388429752067, "acc_norm_stderr": 0.03941897526516301 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.25, "acc_stderr": 0.04186091791394607, 
"acc_norm": 0.25, "acc_norm_stderr": 0.04186091791394607 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.2147239263803681, "acc_stderr": 0.032262193772867744, "acc_norm": 0.2147239263803681, "acc_norm_stderr": 0.032262193772867744 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.25892857142857145, "acc_stderr": 0.04157751539865629, "acc_norm": 0.25892857142857145, "acc_norm_stderr": 0.04157751539865629 }, "harness|hendrycksTest-management|5": { "acc": 0.32038834951456313, "acc_stderr": 0.0462028408228004, "acc_norm": 0.32038834951456313, "acc_norm_stderr": 0.0462028408228004 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2777777777777778, "acc_stderr": 0.02934311479809447, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.02934311479809447 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.24393358876117496, "acc_stderr": 0.015357212665829468, "acc_norm": 0.24393358876117496, "acc_norm_stderr": 0.015357212665829468 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.30057803468208094, "acc_stderr": 0.02468531686725781, "acc_norm": 0.30057803468208094, "acc_norm_stderr": 0.02468531686725781 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23687150837988827, "acc_stderr": 0.014219570788103982, "acc_norm": 0.23687150837988827, "acc_norm_stderr": 0.014219570788103982 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.24836601307189543, "acc_stderr": 0.024739981355113596, "acc_norm": 0.24836601307189543, "acc_norm_stderr": 0.024739981355113596 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.2057877813504823, "acc_stderr": 0.022961339906764244, "acc_norm": 0.2057877813504823, "acc_norm_stderr": 0.022961339906764244 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.2839506172839506, "acc_stderr": 0.025089478523765127, "acc_norm": 0.2839506172839506, "acc_norm_stderr": 0.025089478523765127 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.2553191489361702, "acc_stderr": 0.026011992930902, "acc_norm": 0.2553191489361702, "acc_norm_stderr": 0.026011992930902 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.24902216427640156, "acc_stderr": 0.01104489226404077, "acc_norm": 0.24902216427640156, "acc_norm_stderr": 0.01104489226404077 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.2757352941176471, "acc_stderr": 0.027146271936625166, "acc_norm": 0.2757352941176471, "acc_norm_stderr": 0.027146271936625166 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.27941176470588236, "acc_stderr": 0.01815287105153882, "acc_norm": 0.27941176470588236, "acc_norm_stderr": 0.01815287105153882 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.23636363636363636, "acc_stderr": 0.04069306319721376, "acc_norm": 0.23636363636363636, "acc_norm_stderr": 0.04069306319721376 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.19591836734693877, "acc_stderr": 0.025409301953225678, "acc_norm": 0.19591836734693877, "acc_norm_stderr": 0.025409301953225678 }, "harness|hendrycksTest-sociology|5": { "acc": 0.263681592039801, "acc_stderr": 0.031157150869355582, "acc_norm": 0.263681592039801, "acc_norm_stderr": 0.031157150869355582 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-virology|5": { "acc": 0.28313253012048195, "acc_stderr": 
0.03507295431370519, "acc_norm": 0.28313253012048195, "acc_norm_stderr": 0.03507295431370519 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.30409356725146197, "acc_stderr": 0.03528211258245232, "acc_norm": 0.30409356725146197, "acc_norm_stderr": 0.03528211258245232 }, "harness|truthfulqa:mc|0": { "mc1": 0.25458996328029376, "mc1_stderr": 0.015250117079156494, "mc2": 0.41268090479367026, "mc2_stderr": 0.015430187488430076 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
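For the aggregated numbers quoted above, the card describes a separate "results" configuration. A short sketch, assuming it can be loaded the same way as the per-task configurations; the "latest" split name is taken from the analogous metadata of the previous entry, and the exact schema of the stored results rows is not shown in this excerpt:

```python
from datasets import load_dataset

# "results" is the aggregated-results configuration described in the card text;
# the "latest" split name is an assumption based on the analogous entry above.
results = load_dataset(
    "open-llm-leaderboard/details_KnutJaegersberg__galactica-orca-wizardlm-1.3b",
    "results",
    split="latest",
)
print(results[0])  # inspect the stored aggregate metrics (overall acc of roughly 0.26 per the card above)
```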
open-llm-leaderboard/details_KnutJaegersberg__galactica-orca-wizardlm-1.3b
[ "region:us" ]
2023-08-18T10:11:16+00:00
{"pretty_name": "Evaluation run of KnutJaegersberg/galactica-orca-wizardlm-1.3b", "dataset_summary": "Dataset automatically created during the evaluation run of model [KnutJaegersberg/galactica-orca-wizardlm-1.3b](https://huggingface.co/KnutJaegersberg/galactica-orca-wizardlm-1.3b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_KnutJaegersberg__galactica-orca-wizardlm-1.3b\",\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-07-27T10:33:13.422388](https://huggingface.co/datasets/open-llm-leaderboard/details_KnutJaegersberg__galactica-orca-wizardlm-1.3b/blob/main/results_2023-07-27T10%3A33%3A13.422388.json) (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.2607167828475717,\n \"acc_stderr\": 0.031890976655530354,\n \"acc_norm\": 0.2619942427603414,\n \"acc_norm_stderr\": 0.03190140774792886,\n \"mc1\": 0.25458996328029376,\n \"mc1_stderr\": 0.015250117079156494,\n \"mc2\": 0.41268090479367026,\n \"mc2_stderr\": 0.015430187488430076\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.27303754266211605,\n \"acc_stderr\": 0.013019332762635727,\n \"acc_norm\": 0.30887372013651876,\n \"acc_norm_stderr\": 0.013501770929344004\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.32065325632344155,\n \"acc_stderr\": 0.004657738398900916,\n \"acc_norm\": 0.3601872137024497,\n \"acc_norm_stderr\": 0.004790734683704583\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.2814814814814815,\n \"acc_stderr\": 0.038850042458002526,\n \"acc_norm\": 0.2814814814814815,\n \"acc_norm_stderr\": 0.038850042458002526\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.24342105263157895,\n \"acc_stderr\": 0.034923496688842384,\n \"acc_norm\": 0.24342105263157895,\n \"acc_norm_stderr\": 0.034923496688842384\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.26037735849056604,\n \"acc_stderr\": 0.027008766090708104,\n \"acc_norm\": 0.26037735849056604,\n \"acc_norm_stderr\": 0.027008766090708104\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.03942082639927213,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.03942082639927213\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.17,\n \"acc_stderr\": 0.03775251680686371,\n \"acc_norm\": 0.17,\n \"acc_norm_stderr\": 0.03775251680686371\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.19,\n \"acc_stderr\": 0.03942772444036623,\n \"acc_norm\": 0.19,\n \"acc_norm_stderr\": 0.03942772444036623\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768079,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768079\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.2774566473988439,\n \"acc_stderr\": 0.03414014007044036,\n \"acc_norm\": 0.2774566473988439,\n \"acc_norm_stderr\": 0.03414014007044036\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237655,\n \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237655\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.26382978723404255,\n \"acc_stderr\": 0.028809989854102963,\n \"acc_norm\": 0.26382978723404255,\n \"acc_norm_stderr\": 0.028809989854102963\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2543859649122807,\n \"acc_stderr\": 0.040969851398436716,\n \"acc_norm\": 0.2543859649122807,\n \"acc_norm_stderr\": 0.040969851398436716\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.3310344827586207,\n \"acc_stderr\": 0.03921545312467122,\n \"acc_norm\": 0.3310344827586207,\n \"acc_norm_stderr\": 0.03921545312467122\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2830687830687831,\n \"acc_stderr\": 0.023201392938194974,\n \"acc_norm\": 0.2830687830687831,\n \"acc_norm_stderr\": 0.023201392938194974\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.19047619047619047,\n \"acc_stderr\": 0.035122074123020534,\n \"acc_norm\": 0.19047619047619047,\n \"acc_norm_stderr\": 0.035122074123020534\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.2161290322580645,\n \"acc_stderr\": 0.02341529343356852,\n \"acc_norm\": 0.2161290322580645,\n \"acc_norm_stderr\": 0.02341529343356852\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.23645320197044334,\n \"acc_stderr\": 0.029896114291733552,\n \"acc_norm\": 0.23645320197044334,\n \"acc_norm_stderr\": 0.029896114291733552\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.23030303030303031,\n \"acc_stderr\": 0.03287666758603489,\n \"acc_norm\": 0.23030303030303031,\n \"acc_norm_stderr\": 0.03287666758603489\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.20707070707070707,\n \"acc_stderr\": 0.028869778460267063,\n \"acc_norm\": 0.20707070707070707,\n \"acc_norm_stderr\": 0.028869778460267063\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.22279792746113988,\n \"acc_stderr\": 0.03003114797764154,\n \"acc_norm\": 0.22279792746113988,\n 
\"acc_norm_stderr\": 0.03003114797764154\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.31794871794871793,\n \"acc_stderr\": 0.023610884308927865,\n \"acc_norm\": 0.31794871794871793,\n \"acc_norm_stderr\": 0.023610884308927865\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2777777777777778,\n \"acc_stderr\": 0.027309140588230182,\n \"acc_norm\": 0.2777777777777778,\n \"acc_norm_stderr\": 0.027309140588230182\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.23529411764705882,\n \"acc_stderr\": 0.027553614467863804,\n \"acc_norm\": 0.23529411764705882,\n \"acc_norm_stderr\": 0.027553614467863804\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.24503311258278146,\n \"acc_stderr\": 0.03511807571804724,\n \"acc_norm\": 0.24503311258278146,\n \"acc_norm_stderr\": 0.03511807571804724\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.25137614678899084,\n \"acc_stderr\": 0.018599206360287415,\n \"acc_norm\": 0.25137614678899084,\n \"acc_norm_stderr\": 0.018599206360287415\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.30092592592592593,\n \"acc_stderr\": 0.03128039084329883,\n \"acc_norm\": 0.30092592592592593,\n \"acc_norm_stderr\": 0.03128039084329883\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.2549019607843137,\n \"acc_stderr\": 0.030587591351604246,\n \"acc_norm\": 0.2549019607843137,\n \"acc_norm_stderr\": 0.030587591351604246\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.270042194092827,\n \"acc_stderr\": 0.028900721906293426,\n \"acc_norm\": 0.270042194092827,\n \"acc_norm_stderr\": 0.028900721906293426\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.32286995515695066,\n \"acc_stderr\": 0.03138147637575498,\n \"acc_norm\": 0.32286995515695066,\n \"acc_norm_stderr\": 0.03138147637575498\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.183206106870229,\n \"acc_stderr\": 0.033927709264947335,\n \"acc_norm\": 0.183206106870229,\n \"acc_norm_stderr\": 0.033927709264947335\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.24793388429752067,\n \"acc_stderr\": 0.03941897526516301,\n \"acc_norm\": 0.24793388429752067,\n \"acc_norm_stderr\": 0.03941897526516301\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04186091791394607,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04186091791394607\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.2147239263803681,\n \"acc_stderr\": 0.032262193772867744,\n \"acc_norm\": 0.2147239263803681,\n \"acc_norm_stderr\": 0.032262193772867744\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.25892857142857145,\n \"acc_stderr\": 0.04157751539865629,\n \"acc_norm\": 0.25892857142857145,\n \"acc_norm_stderr\": 0.04157751539865629\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.32038834951456313,\n \"acc_stderr\": 0.0462028408228004,\n \"acc_norm\": 0.32038834951456313,\n \"acc_norm_stderr\": 0.0462028408228004\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2777777777777778,\n \"acc_stderr\": 0.02934311479809447,\n \"acc_norm\": 0.2777777777777778,\n \"acc_norm_stderr\": 0.02934311479809447\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n 
\"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.24393358876117496,\n \"acc_stderr\": 0.015357212665829468,\n \"acc_norm\": 0.24393358876117496,\n \"acc_norm_stderr\": 0.015357212665829468\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.30057803468208094,\n \"acc_stderr\": 0.02468531686725781,\n \"acc_norm\": 0.30057803468208094,\n \"acc_norm_stderr\": 0.02468531686725781\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23687150837988827,\n \"acc_stderr\": 0.014219570788103982,\n \"acc_norm\": 0.23687150837988827,\n \"acc_norm_stderr\": 0.014219570788103982\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.24836601307189543,\n \"acc_stderr\": 0.024739981355113596,\n \"acc_norm\": 0.24836601307189543,\n \"acc_norm_stderr\": 0.024739981355113596\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.2057877813504823,\n \"acc_stderr\": 0.022961339906764244,\n \"acc_norm\": 0.2057877813504823,\n \"acc_norm_stderr\": 0.022961339906764244\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.2839506172839506,\n \"acc_stderr\": 0.025089478523765127,\n \"acc_norm\": 0.2839506172839506,\n \"acc_norm_stderr\": 0.025089478523765127\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.2553191489361702,\n \"acc_stderr\": 0.026011992930902,\n \"acc_norm\": 0.2553191489361702,\n \"acc_norm_stderr\": 0.026011992930902\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.24902216427640156,\n \"acc_stderr\": 0.01104489226404077,\n \"acc_norm\": 0.24902216427640156,\n \"acc_norm_stderr\": 0.01104489226404077\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.2757352941176471,\n \"acc_stderr\": 0.027146271936625166,\n \"acc_norm\": 0.2757352941176471,\n \"acc_norm_stderr\": 0.027146271936625166\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.27941176470588236,\n \"acc_stderr\": 0.01815287105153882,\n \"acc_norm\": 0.27941176470588236,\n \"acc_norm_stderr\": 0.01815287105153882\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.23636363636363636,\n \"acc_stderr\": 0.04069306319721376,\n \"acc_norm\": 0.23636363636363636,\n \"acc_norm_stderr\": 0.04069306319721376\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.19591836734693877,\n \"acc_stderr\": 0.025409301953225678,\n \"acc_norm\": 0.19591836734693877,\n \"acc_norm_stderr\": 0.025409301953225678\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.263681592039801,\n \"acc_stderr\": 0.031157150869355582,\n \"acc_norm\": 0.263681592039801,\n \"acc_norm_stderr\": 0.031157150869355582\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.28313253012048195,\n \"acc_stderr\": 0.03507295431370519,\n \"acc_norm\": 0.28313253012048195,\n \"acc_norm_stderr\": 0.03507295431370519\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.30409356725146197,\n \"acc_stderr\": 0.03528211258245232,\n \"acc_norm\": 0.30409356725146197,\n \"acc_norm_stderr\": 0.03528211258245232\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.25458996328029376,\n \"mc1_stderr\": 0.015250117079156494,\n \"mc2\": 0.41268090479367026,\n \"mc2_stderr\": 0.015430187488430076\n }\n}\n```", "repo_url": "https://huggingface.co/KnutJaegersberg/galactica-orca-wizardlm-1.3b", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|arc:challenge|25_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hellaswag|10_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-27T10:33:13.422388.parquet", 
"**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-27T10:33:13.422388.parquet", 
"**/details_harness|hendrycksTest-computer_security|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-27T10:33:13.422388.parquet", 
"**/details_harness|hendrycksTest-professional_medicine|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-27T10:33:13.422388.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": 
["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": 
["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-management|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": 
["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-27T10:33:13.422388.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_27T10_33_13.422388", "path": ["results_2023-07-27T10:33:13.422388.parquet"]}, {"split": "latest", "path": ["results_2023-07-27T10:33:13.422388.parquet"]}]}]}
2023-08-27T11:31:03+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of KnutJaegersberg/galactica-orca-wizardlm-1.3b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model KnutJaegersberg/galactica-orca-wizardlm-1.3b on the Open LLM Leaderboard. The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-07-27T10:33:13.422388 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of KnutJaegersberg/galactica-orca-wizardlm-1.3b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model KnutJaegersberg/galactica-orca-wizardlm-1.3b on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-07-27T10:33:13.422388 (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of KnutJaegersberg/galactica-orca-wizardlm-1.3b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model KnutJaegersberg/galactica-orca-wizardlm-1.3b on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-07-27T10:33:13.422388 (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 28, 31, 176, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of KnutJaegersberg/galactica-orca-wizardlm-1.3b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model KnutJaegersberg/galactica-orca-wizardlm-1.3b on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-07-27T10:33:13.422388 (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
4d07cba7bbe9fd7e3fa9428ec29845420254d716
# Dataset Card for Evaluation run of KnutJaegersberg/megatron-GPT-2-345m-EvolInstruct

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/KnutJaegersberg/megatron-GPT-2-345m-EvolInstruct
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [KnutJaegersberg/megatron-GPT-2-345m-EvolInstruct](https://huggingface.co/KnutJaegersberg/megatron-GPT-2-345m-EvolInstruct) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_KnutJaegersberg__megatron-GPT-2-345m-EvolInstruct",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-09-22T21:16:44.386502](https://huggingface.co/datasets/open-llm-leaderboard/details_KnutJaegersberg__megatron-GPT-2-345m-EvolInstruct/blob/main/results_2023-09-22T21-16-44.386502.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.0032508389261744967,
        "em_stderr": 0.000582948670855896,
        "f1": 0.04389052013422833,
        "f1_stderr": 0.0012654910642229172,
        "acc": 0.27577067125904975,
        "acc_stderr": 0.007840478478378102
    },
    "harness|drop|3": {
        "em": 0.0032508389261744967,
        "em_stderr": 0.000582948670855896,
        "f1": 0.04389052013422833,
        "f1_stderr": 0.0012654910642229172
    },
    "harness|gsm8k|5": {
        "acc": 0.0037907505686125853,
        "acc_stderr": 0.0016927007401501884
    },
    "harness|winogrande|5": {
        "acc": 0.5477505919494869,
        "acc_stderr": 0.013988256216606017
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
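Besides the per-task detail configurations, the card above mentions an aggregated "results" configuration. The following is a minimal sketch, not part of the original card, of reading it with the `datasets` library; the repository and configuration names come from the metadata above, while the assumption that the "latest" split holds the aggregated metrics of the most recent run is illustrative rather than guaranteed.

```python
from datasets import load_dataset

# Sketch: load the aggregated "results" configuration of this evaluation-details
# repository. The "latest" split is listed in the repository metadata and points
# to the most recent results parquet file (assumed to contain aggregated metrics).
results = load_dataset(
    "open-llm-leaderboard/details_KnutJaegersberg__megatron-GPT-2-345m-EvolInstruct",
    "results",
    split="latest",
)

# Inspect the aggregated metrics of the most recent run.
print(results[0])
```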
open-llm-leaderboard/details_KnutJaegersberg__megatron-GPT-2-345m-EvolInstruct
[ "region:us" ]
2023-08-18T10:11:25+00:00
{"pretty_name": "Evaluation run of KnutJaegersberg/megatron-GPT-2-345m-EvolInstruct", "dataset_summary": "Dataset automatically created during the evaluation run of model [KnutJaegersberg/megatron-GPT-2-345m-EvolInstruct](https://huggingface.co/KnutJaegersberg/megatron-GPT-2-345m-EvolInstruct) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_KnutJaegersberg__megatron-GPT-2-345m-EvolInstruct\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-09-22T21:16:44.386502](https://huggingface.co/datasets/open-llm-leaderboard/details_KnutJaegersberg__megatron-GPT-2-345m-EvolInstruct/blob/main/results_2023-09-22T21-16-44.386502.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0032508389261744967,\n \"em_stderr\": 0.000582948670855896,\n \"f1\": 0.04389052013422833,\n \"f1_stderr\": 0.0012654910642229172,\n \"acc\": 0.27577067125904975,\n \"acc_stderr\": 0.007840478478378102\n },\n \"harness|drop|3\": {\n \"em\": 0.0032508389261744967,\n \"em_stderr\": 0.000582948670855896,\n \"f1\": 0.04389052013422833,\n \"f1_stderr\": 0.0012654910642229172\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0037907505686125853,\n \"acc_stderr\": 0.0016927007401501884\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5477505919494869,\n \"acc_stderr\": 0.013988256216606017\n }\n}\n```", "repo_url": "https://huggingface.co/KnutJaegersberg/megatron-GPT-2-345m-EvolInstruct", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|arc:challenge|25_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_22T21_16_44.386502", "path": ["**/details_harness|drop|3_2023-09-22T21-16-44.386502.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-22T21-16-44.386502.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_22T21_16_44.386502", "path": ["**/details_harness|gsm8k|5_2023-09-22T21-16-44.386502.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-09-22T21-16-44.386502.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hellaswag|10_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hellaswag|10_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T14:09:55.167974.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T14:09:55.167974.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T14:09:55.167974.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T14:09:55.167974.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": 
"2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T14:09:55.167974.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T14:09:55.167974.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_22T21_16_44.386502", "path": ["**/details_harness|winogrande|5_2023-09-22T21-16-44.386502.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-22T21-16-44.386502.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T14_09_55.167974", "path": ["results_2023-07-19T14:09:55.167974.parquet"]}, {"split": "2023_09_22T21_16_44.386502", "path": ["results_2023-09-22T21-16-44.386502.parquet"]}, {"split": "latest", "path": ["results_2023-09-22T21-16-44.386502.parquet"]}]}]}
2023-09-22T20:16:55+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of KnutJaegersberg/megatron-GPT-2-345m-EvolInstruct ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model KnutJaegersberg/megatron-GPT-2-345m-EvolInstruct on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-09-22T21:16:44.386502 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of KnutJaegersberg/megatron-GPT-2-345m-EvolInstruct", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model KnutJaegersberg/megatron-GPT-2-345m-EvolInstruct on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-22T21:16:44.386502(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of KnutJaegersberg/megatron-GPT-2-345m-EvolInstruct", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model KnutJaegersberg/megatron-GPT-2-345m-EvolInstruct on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-22T21:16:44.386502(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 29, 31, 177, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of KnutJaegersberg/megatron-GPT-2-345m-EvolInstruct## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model KnutJaegersberg/megatron-GPT-2-345m-EvolInstruct on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-09-22T21:16:44.386502(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
64cf4345f8a406088ff0bb65bcb3deb549d3ca58
# Dataset Card for Evaluation run of WeOpenML/Alpaca-7B-v1 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/WeOpenML/Alpaca-7B-v1 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [WeOpenML/Alpaca-7B-v1](https://huggingface.co/WeOpenML/Alpaca-7B-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_WeOpenML__Alpaca-7B-v1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-09-23T06:14:35.879724](https://huggingface.co/datasets/open-llm-leaderboard/details_WeOpenML__Alpaca-7B-v1/blob/main/results_2023-09-23T06-14-35.879724.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.19851090604026847, "em_stderr": 0.0040848926295302, "f1": 0.26037332214765113, "f1_stderr": 0.00410815192392868, "acc": 0.3582956402478978, "acc_stderr": 0.006878882736845241 }, "harness|drop|3": { "em": 0.19851090604026847, "em_stderr": 0.0040848926295302, "f1": 0.26037332214765113, "f1_stderr": 0.00410815192392868 }, "harness|gsm8k|5": { "acc": 0.001516300227445034, "acc_stderr": 0.001071779348549261 }, "harness|winogrande|5": { "acc": 0.7150749802683505, "acc_stderr": 0.012685986125141222 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
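The card above only shows how to pull the per-task `harness_winogrande_5` split; this entry's metadata further down also defines a `results` config whose `latest` split carries the aggregated numbers quoted under "Latest results". A minimal sketch of reading those aggregates (the repository id, config name, and split name are taken from this entry; the rest is ordinary `datasets` usage and not something prescribed by the original card):

```python
from datasets import load_dataset

# Load the aggregated-results config of this evaluation entry; the "latest"
# split always points at the most recent run (2023-09-23 here).
results = load_dataset(
    "open-llm-leaderboard/details_WeOpenML__Alpaca-7B-v1",
    "results",
    split="latest",
)

# Each row holds the aggregated metrics of one run as serialized by the harness.
print(results[0])
```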
open-llm-leaderboard/details_WeOpenML__Alpaca-7B-v1
[ "region:us" ]
2023-08-18T10:11:34+00:00
{"pretty_name": "Evaluation run of WeOpenML/Alpaca-7B-v1", "dataset_summary": "Dataset automatically created during the evaluation run of model [WeOpenML/Alpaca-7B-v1](https://huggingface.co/WeOpenML/Alpaca-7B-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_WeOpenML__Alpaca-7B-v1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-09-23T06:14:35.879724](https://huggingface.co/datasets/open-llm-leaderboard/details_WeOpenML__Alpaca-7B-v1/blob/main/results_2023-09-23T06-14-35.879724.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.19851090604026847,\n \"em_stderr\": 0.0040848926295302,\n \"f1\": 0.26037332214765113,\n \"f1_stderr\": 0.00410815192392868,\n \"acc\": 0.3582956402478978,\n \"acc_stderr\": 0.006878882736845241\n },\n \"harness|drop|3\": {\n \"em\": 0.19851090604026847,\n \"em_stderr\": 0.0040848926295302,\n \"f1\": 0.26037332214765113,\n \"f1_stderr\": 0.00410815192392868\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.001516300227445034,\n \"acc_stderr\": 0.001071779348549261\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7150749802683505,\n \"acc_stderr\": 0.012685986125141222\n }\n}\n```", "repo_url": "https://huggingface.co/WeOpenML/Alpaca-7B-v1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|arc:challenge|25_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_23T06_14_35.879724", "path": ["**/details_harness|drop|3_2023-09-23T06-14-35.879724.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-23T06-14-35.879724.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_23T06_14_35.879724", "path": ["**/details_harness|gsm8k|5_2023-09-23T06-14-35.879724.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-09-23T06-14-35.879724.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hellaswag|10_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", 
"path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T11:38:11.458157.parquet", 
"**/details_harness|hendrycksTest-management|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T11:38:11.458157.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-24T11:38:11.458157.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T11:38:11.458157.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": 
"2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-24T11:38:11.458157.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-24T11:38:11.458157.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_23T06_14_35.879724", "path": ["**/details_harness|winogrande|5_2023-09-23T06-14-35.879724.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-23T06-14-35.879724.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_24T11_38_11.458157", "path": ["results_2023-07-24T11:38:11.458157.parquet"]}, {"split": "2023_09_23T06_14_35.879724", "path": ["results_2023-09-23T06-14-35.879724.parquet"]}, {"split": "latest", "path": ["results_2023-09-23T06-14-35.879724.parquet"]}]}]}
2023-09-23T05:14:48+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of WeOpenML/Alpaca-7B-v1 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model WeOpenML/Alpaca-7B-v1 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-09-23T06:14:35.879724 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of WeOpenML/Alpaca-7B-v1", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model WeOpenML/Alpaca-7B-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-23T06:14:35.879724(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of WeOpenML/Alpaca-7B-v1", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model WeOpenML/Alpaca-7B-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-23T06:14:35.879724(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 21, 31, 169, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of WeOpenML/Alpaca-7B-v1## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model WeOpenML/Alpaca-7B-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-09-23T06:14:35.879724(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
5586e33eee7c36ce666fed0c577e821003a7c8cb
# Dataset Card for Evaluation run of WeOpenML/PandaLM-Alpaca-7B-v1 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/WeOpenML/PandaLM-Alpaca-7B-v1 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [WeOpenML/PandaLM-Alpaca-7B-v1](https://huggingface.co/WeOpenML/PandaLM-Alpaca-7B-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_WeOpenML__PandaLM-Alpaca-7B-v1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-09-22T22:33:33.888453](https://huggingface.co/datasets/open-llm-leaderboard/details_WeOpenML__PandaLM-Alpaca-7B-v1/blob/main/results_2023-09-22T22-33-33.888453.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0856753355704698, "em_stderr": 0.0028662744739059616, "f1": 0.15614618288590562, "f1_stderr": 0.003108423155895864, "acc": 0.3640595557731007, "acc_stderr": 0.007623933803325749 }, "harness|drop|3": { "em": 0.0856753355704698, "em_stderr": 0.0028662744739059616, "f1": 0.15614618288590562, "f1_stderr": 0.003108423155895864 }, "harness|gsm8k|5": { "acc": 0.009097801364670205, "acc_stderr": 0.0026153265107756725 }, "harness|winogrande|5": { "acc": 0.7190213101815311, "acc_stderr": 0.012632541095875825 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
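As with the previous entry, every evaluated task listed in the metadata below is exposed as its own config, each carrying a timestamped split plus a `latest` split. A small sketch of discovering the configs and reading the most recent DROP details (the config and split names come from this entry's metadata; the helper calls are standard `datasets` functions and purely illustrative):

```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_WeOpenML__PandaLM-Alpaca-7B-v1"

# List the per-task configurations declared for this evaluation entry.
configs = get_dataset_config_names(repo)
print(len(configs), configs[:5])

# Pull the latest DROP details split named in the metadata ("harness_drop_3").
drop_details = load_dataset(repo, "harness_drop_3", split="latest")
print(drop_details[0])
```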
open-llm-leaderboard/details_WeOpenML__PandaLM-Alpaca-7B-v1
[ "region:us" ]
2023-08-18T10:11:43+00:00
{"pretty_name": "Evaluation run of WeOpenML/PandaLM-Alpaca-7B-v1", "dataset_summary": "Dataset automatically created during the evaluation run of model [WeOpenML/PandaLM-Alpaca-7B-v1](https://huggingface.co/WeOpenML/PandaLM-Alpaca-7B-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_WeOpenML__PandaLM-Alpaca-7B-v1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-09-22T22:33:33.888453](https://huggingface.co/datasets/open-llm-leaderboard/details_WeOpenML__PandaLM-Alpaca-7B-v1/blob/main/results_2023-09-22T22-33-33.888453.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0856753355704698,\n \"em_stderr\": 0.0028662744739059616,\n \"f1\": 0.15614618288590562,\n \"f1_stderr\": 0.003108423155895864,\n \"acc\": 0.3640595557731007,\n \"acc_stderr\": 0.007623933803325749\n },\n \"harness|drop|3\": {\n \"em\": 0.0856753355704698,\n \"em_stderr\": 0.0028662744739059616,\n \"f1\": 0.15614618288590562,\n \"f1_stderr\": 0.003108423155895864\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.009097801364670205,\n \"acc_stderr\": 0.0026153265107756725\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7190213101815311,\n \"acc_stderr\": 0.012632541095875825\n }\n}\n```", "repo_url": "https://huggingface.co/WeOpenML/PandaLM-Alpaca-7B-v1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|arc:challenge|25_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_22T22_33_33.888453", "path": ["**/details_harness|drop|3_2023-09-22T22-33-33.888453.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-22T22-33-33.888453.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_22T22_33_33.888453", "path": ["**/details_harness|gsm8k|5_2023-09-22T22-33-33.888453.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-09-22T22-33-33.888453.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hellaswag|10_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", 
"data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T12:03:47.951462.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T12:03:47.951462.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-24T12:03:47.951462.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-24T12:03:47.951462.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-24T12:03:47.951462.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_22T22_33_33.888453", "path": ["**/details_harness|winogrande|5_2023-09-22T22-33-33.888453.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-22T22-33-33.888453.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_24T12_03_47.951462", "path": ["results_2023-07-24T12:03:47.951462.parquet"]}, {"split": "2023_09_22T22_33_33.888453", "path": ["results_2023-09-22T22-33-33.888453.parquet"]}, {"split": "latest", "path": ["results_2023-09-22T22-33-33.888453.parquet"]}]}]}
2023-09-22T21:33:45+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of WeOpenML/PandaLM-Alpaca-7B-v1 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model WeOpenML/PandaLM-Alpaca-7B-v1 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-09-22T22:33:33.888453 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
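The load snippet that "you can for instance do the following" refers to was dropped when this text was flattened; a minimal sketch, mirroring the snippet embedded in the metadata field above, would be:

```python
from datasets import load_dataset

# Per-sample details for one evaluated task of this model; the card's own
# example targets the winogrande task and its "train" split, which the
# summary says always points at the latest run.
data = load_dataset(
    "open-llm-leaderboard/details_WeOpenML__PandaLM-Alpaca-7B-v1",
    "harness_winogrande_5",
    split="train",
)
```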
[ "# Dataset Card for Evaluation run of WeOpenML/PandaLM-Alpaca-7B-v1", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model WeOpenML/PandaLM-Alpaca-7B-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-22T22:33:33.888453(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of WeOpenML/PandaLM-Alpaca-7B-v1", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model WeOpenML/PandaLM-Alpaca-7B-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-22T22:33:33.888453(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 25, 31, 173, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of WeOpenML/PandaLM-Alpaca-7B-v1## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model WeOpenML/PandaLM-Alpaca-7B-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-09-22T22:33:33.888453(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
279383fb4b952987ee3ff2dc988f83701db67faa
# Dataset Card for Evaluation run of ddobokki/Llama-2-70b-orca-200k ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/ddobokki/Llama-2-70b-orca-200k - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [ddobokki/Llama-2-70b-orca-200k](https://huggingface.co/ddobokki/Llama-2-70b-orca-200k) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_ddobokki__Llama-2-70b-orca-200k", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-09T20:21:28.711089](https://huggingface.co/datasets/open-llm-leaderboard/details_ddobokki__Llama-2-70b-orca-200k/blob/main/results_2023-08-09T20%3A21%3A28.711089.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6675525003199225, "acc_stderr": 0.0320256356518761, "acc_norm": 0.6716808545277895, "acc_norm_stderr": 0.03199912887877205, "mc1": 0.408812729498164, "mc1_stderr": 0.01720995215164173, "mc2": 0.5618014117500216, "mc2_stderr": 0.015000194909320638 }, "harness|arc:challenge|25": { "acc": 0.5989761092150171, "acc_stderr": 0.014322255790719867, "acc_norm": 0.6484641638225256, "acc_norm_stderr": 0.013952413699600935 }, "harness|hellaswag|10": { "acc": 0.6584345747858992, "acc_stderr": 0.004732654295724444, "acc_norm": 0.8525194184425413, "acc_norm_stderr": 0.0035385967737048313 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6148148148148148, "acc_stderr": 0.042039210401562783, "acc_norm": 0.6148148148148148, "acc_norm_stderr": 0.042039210401562783 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7697368421052632, "acc_stderr": 0.03426059424403165, "acc_norm": 0.7697368421052632, "acc_norm_stderr": 0.03426059424403165 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.71, "acc_stderr": 0.04560480215720684, "acc_norm": 0.71, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6981132075471698, "acc_stderr": 0.02825420034443866, "acc_norm": 0.6981132075471698, "acc_norm_stderr": 0.02825420034443866 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7847222222222222, "acc_stderr": 0.034370793441061344, "acc_norm": 0.7847222222222222, "acc_norm_stderr": 0.034370793441061344 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.45, "acc_stderr": 0.04999999999999999, "acc_norm": 0.45,
"acc_norm_stderr": 0.04999999999999999 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.57, "acc_stderr": 0.04975698519562428, "acc_norm": 0.57, "acc_norm_stderr": 0.04975698519562428 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6127167630057804, "acc_stderr": 0.037143259063020656, "acc_norm": 0.6127167630057804, "acc_norm_stderr": 0.037143259063020656 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3431372549019608, "acc_stderr": 0.04724007352383888, "acc_norm": 0.3431372549019608, "acc_norm_stderr": 0.04724007352383888 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.74, "acc_stderr": 0.04408440022768078, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6127659574468085, "acc_stderr": 0.03184389265339526, "acc_norm": 0.6127659574468085, "acc_norm_stderr": 0.03184389265339526 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4298245614035088, "acc_stderr": 0.04657047260594962, "acc_norm": 0.4298245614035088, "acc_norm_stderr": 0.04657047260594962 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.593103448275862, "acc_stderr": 0.04093793981266236, "acc_norm": 0.593103448275862, "acc_norm_stderr": 0.04093793981266236 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.41534391534391535, "acc_stderr": 0.025379524910778408, "acc_norm": 0.41534391534391535, "acc_norm_stderr": 0.025379524910778408 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4365079365079365, "acc_stderr": 0.04435932892851466, "acc_norm": 0.4365079365079365, "acc_norm_stderr": 0.04435932892851466 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8129032258064516, "acc_stderr": 0.02218571009225225, "acc_norm": 0.8129032258064516, "acc_norm_stderr": 0.02218571009225225 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5123152709359606, "acc_stderr": 0.035169204442208966, "acc_norm": 0.5123152709359606, "acc_norm_stderr": 0.035169204442208966 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.72, "acc_stderr": 0.04512608598542127, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8121212121212121, "acc_stderr": 0.03050193405942914, "acc_norm": 0.8121212121212121, "acc_norm_stderr": 0.03050193405942914 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8636363636363636, "acc_stderr": 0.024450155973189835, "acc_norm": 0.8636363636363636, "acc_norm_stderr": 0.024450155973189835 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9067357512953368, "acc_stderr": 0.0209868545932897, "acc_norm": 0.9067357512953368, "acc_norm_stderr": 0.0209868545932897 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6820512820512821, "acc_stderr": 0.02361088430892786, "acc_norm": 0.6820512820512821, "acc_norm_stderr": 0.02361088430892786 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.35555555555555557, "acc_stderr": 0.02918571494985741, "acc_norm": 0.35555555555555557, "acc_norm_stderr": 0.02918571494985741 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7142857142857143, "acc_stderr": 
0.029344572500634335, "acc_norm": 0.7142857142857143, "acc_norm_stderr": 0.029344572500634335 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.40397350993377484, "acc_stderr": 0.04006485685365343, "acc_norm": 0.40397350993377484, "acc_norm_stderr": 0.04006485685365343 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8568807339449541, "acc_stderr": 0.015014462497168585, "acc_norm": 0.8568807339449541, "acc_norm_stderr": 0.015014462497168585 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5416666666666666, "acc_stderr": 0.03398110890294636, "acc_norm": 0.5416666666666666, "acc_norm_stderr": 0.03398110890294636 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8725490196078431, "acc_stderr": 0.023405530480846315, "acc_norm": 0.8725490196078431, "acc_norm_stderr": 0.023405530480846315 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8523206751054853, "acc_stderr": 0.0230943295825957, "acc_norm": 0.8523206751054853, "acc_norm_stderr": 0.0230943295825957 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7488789237668162, "acc_stderr": 0.02910522083322462, "acc_norm": 0.7488789237668162, "acc_norm_stderr": 0.02910522083322462 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7938931297709924, "acc_stderr": 0.03547771004159465, "acc_norm": 0.7938931297709924, "acc_norm_stderr": 0.03547771004159465 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8760330578512396, "acc_stderr": 0.030083098716035206, "acc_norm": 0.8760330578512396, "acc_norm_stderr": 0.030083098716035206 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8333333333333334, "acc_stderr": 0.036028141763926456, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.036028141763926456 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7852760736196319, "acc_stderr": 0.03226219377286775, "acc_norm": 0.7852760736196319, "acc_norm_stderr": 0.03226219377286775 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4732142857142857, "acc_stderr": 0.047389751192741546, "acc_norm": 0.4732142857142857, "acc_norm_stderr": 0.047389751192741546 }, "harness|hendrycksTest-management|5": { "acc": 0.8155339805825242, "acc_stderr": 0.03840423627288276, "acc_norm": 0.8155339805825242, "acc_norm_stderr": 0.03840423627288276 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9017094017094017, "acc_stderr": 0.019503444900757567, "acc_norm": 0.9017094017094017, "acc_norm_stderr": 0.019503444900757567 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.859514687100894, "acc_stderr": 0.012426211353093438, "acc_norm": 0.859514687100894, "acc_norm_stderr": 0.012426211353093438 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7745664739884393, "acc_stderr": 0.022497230190967558, "acc_norm": 0.7745664739884393, "acc_norm_stderr": 0.022497230190967558 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.5363128491620112, "acc_stderr": 0.016678341894533162, "acc_norm": 0.5363128491620112, "acc_norm_stderr": 0.016678341894533162 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6895424836601307, "acc_stderr": 0.026493033225145898, "acc_norm": 0.6895424836601307, "acc_norm_stderr": 0.026493033225145898 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7395498392282959, "acc_stderr": 0.02492672322484554, "acc_norm": 0.7395498392282959, "acc_norm_stderr": 0.02492672322484554 }, 
"harness|hendrycksTest-prehistory|5": { "acc": 0.7685185185185185, "acc_stderr": 0.023468429832451152, "acc_norm": 0.7685185185185185, "acc_norm_stderr": 0.023468429832451152 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.48936170212765956, "acc_stderr": 0.029820747191422473, "acc_norm": 0.48936170212765956, "acc_norm_stderr": 0.029820747191422473 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5084745762711864, "acc_stderr": 0.012768401697269048, "acc_norm": 0.5084745762711864, "acc_norm_stderr": 0.012768401697269048 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6838235294117647, "acc_stderr": 0.02824568739146292, "acc_norm": 0.6838235294117647, "acc_norm_stderr": 0.02824568739146292 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7075163398692811, "acc_stderr": 0.018403415710109793, "acc_norm": 0.7075163398692811, "acc_norm_stderr": 0.018403415710109793 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7363636363636363, "acc_stderr": 0.04220224692971987, "acc_norm": 0.7363636363636363, "acc_norm_stderr": 0.04220224692971987 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.763265306122449, "acc_stderr": 0.027212835884073153, "acc_norm": 0.763265306122449, "acc_norm_stderr": 0.027212835884073153 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8557213930348259, "acc_stderr": 0.024845753212306042, "acc_norm": 0.8557213930348259, "acc_norm_stderr": 0.024845753212306042 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.035887028128263686, "acc_norm": 0.85, "acc_norm_stderr": 0.035887028128263686 }, "harness|hendrycksTest-virology|5": { "acc": 0.5120481927710844, "acc_stderr": 0.03891364495835817, "acc_norm": 0.5120481927710844, "acc_norm_stderr": 0.03891364495835817 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.847953216374269, "acc_stderr": 0.027539122889061445, "acc_norm": 0.847953216374269, "acc_norm_stderr": 0.027539122889061445 }, "harness|truthfulqa:mc|0": { "mc1": 0.408812729498164, "mc1_stderr": 0.01720995215164173, "mc2": 0.5618014117500216, "mc2_stderr": 0.015000194909320638 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_ddobokki__Llama-2-70b-orca-200k
[ "region:us" ]
2023-08-18T10:11:52+00:00
{"pretty_name": "Evaluation run of ddobokki/Llama-2-70b-orca-200k", "dataset_summary": "Dataset automatically created during the evaluation run of model [ddobokki/Llama-2-70b-orca-200k](https://huggingface.co/ddobokki/Llama-2-70b-orca-200k) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ddobokki__Llama-2-70b-orca-200k\",\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-08-09T20:21:28.711089](https://huggingface.co/datasets/open-llm-leaderboard/details_ddobokki__Llama-2-70b-orca-200k/blob/main/results_2023-08-09T20%3A21%3A28.711089.json) (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6675525003199225,\n \"acc_stderr\": 0.0320256356518761,\n \"acc_norm\": 0.6716808545277895,\n \"acc_norm_stderr\": 0.03199912887877205,\n \"mc1\": 0.408812729498164,\n \"mc1_stderr\": 0.01720995215164173,\n \"mc2\": 0.5618014117500216,\n \"mc2_stderr\": 0.015000194909320638\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5989761092150171,\n \"acc_stderr\": 0.014322255790719867,\n \"acc_norm\": 0.6484641638225256,\n \"acc_norm_stderr\": 0.013952413699600935\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6584345747858992,\n \"acc_stderr\": 0.004732654295724444,\n \"acc_norm\": 0.8525194184425413,\n \"acc_norm_stderr\": 0.0035385967737048313\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6148148148148148,\n \"acc_stderr\": 0.042039210401562783,\n \"acc_norm\": 0.6148148148148148,\n \"acc_norm_stderr\": 0.042039210401562783\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7697368421052632,\n \"acc_stderr\": 0.03426059424403165,\n \"acc_norm\": 0.7697368421052632,\n \"acc_norm_stderr\": 0.03426059424403165\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6981132075471698,\n \"acc_stderr\": 0.02825420034443866,\n \"acc_norm\": 0.6981132075471698,\n \"acc_norm_stderr\": 0.02825420034443866\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7847222222222222,\n \"acc_stderr\": 0.034370793441061344,\n \"acc_norm\": 0.7847222222222222,\n \"acc_norm_stderr\": 0.034370793441061344\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 
0.04999999999999999,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.04999999999999999\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6127167630057804,\n \"acc_stderr\": 0.037143259063020656,\n \"acc_norm\": 0.6127167630057804,\n \"acc_norm_stderr\": 0.037143259063020656\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3431372549019608,\n \"acc_stderr\": 0.04724007352383888,\n \"acc_norm\": 0.3431372549019608,\n \"acc_norm_stderr\": 0.04724007352383888\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768078\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6127659574468085,\n \"acc_stderr\": 0.03184389265339526,\n \"acc_norm\": 0.6127659574468085,\n \"acc_norm_stderr\": 0.03184389265339526\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4298245614035088,\n \"acc_stderr\": 0.04657047260594962,\n \"acc_norm\": 0.4298245614035088,\n \"acc_norm_stderr\": 0.04657047260594962\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.593103448275862,\n \"acc_stderr\": 0.04093793981266236,\n \"acc_norm\": 0.593103448275862,\n \"acc_norm_stderr\": 0.04093793981266236\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.41534391534391535,\n \"acc_stderr\": 0.025379524910778408,\n \"acc_norm\": 0.41534391534391535,\n \"acc_norm_stderr\": 0.025379524910778408\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4365079365079365,\n \"acc_stderr\": 0.04435932892851466,\n \"acc_norm\": 0.4365079365079365,\n \"acc_norm_stderr\": 0.04435932892851466\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8129032258064516,\n \"acc_stderr\": 0.02218571009225225,\n \"acc_norm\": 0.8129032258064516,\n \"acc_norm_stderr\": 0.02218571009225225\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5123152709359606,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.5123152709359606,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8121212121212121,\n \"acc_stderr\": 0.03050193405942914,\n \"acc_norm\": 0.8121212121212121,\n \"acc_norm_stderr\": 0.03050193405942914\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8636363636363636,\n \"acc_stderr\": 0.024450155973189835,\n \"acc_norm\": 0.8636363636363636,\n \"acc_norm_stderr\": 0.024450155973189835\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9067357512953368,\n \"acc_stderr\": 0.0209868545932897,\n \"acc_norm\": 0.9067357512953368,\n \"acc_norm_stderr\": 0.0209868545932897\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": 
{\n \"acc\": 0.6820512820512821,\n \"acc_stderr\": 0.02361088430892786,\n \"acc_norm\": 0.6820512820512821,\n \"acc_norm_stderr\": 0.02361088430892786\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.35555555555555557,\n \"acc_stderr\": 0.02918571494985741,\n \"acc_norm\": 0.35555555555555557,\n \"acc_norm_stderr\": 0.02918571494985741\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7142857142857143,\n \"acc_stderr\": 0.029344572500634335,\n \"acc_norm\": 0.7142857142857143,\n \"acc_norm_stderr\": 0.029344572500634335\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.40397350993377484,\n \"acc_stderr\": 0.04006485685365343,\n \"acc_norm\": 0.40397350993377484,\n \"acc_norm_stderr\": 0.04006485685365343\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8568807339449541,\n \"acc_stderr\": 0.015014462497168585,\n \"acc_norm\": 0.8568807339449541,\n \"acc_norm_stderr\": 0.015014462497168585\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5416666666666666,\n \"acc_stderr\": 0.03398110890294636,\n \"acc_norm\": 0.5416666666666666,\n \"acc_norm_stderr\": 0.03398110890294636\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8725490196078431,\n \"acc_stderr\": 0.023405530480846315,\n \"acc_norm\": 0.8725490196078431,\n \"acc_norm_stderr\": 0.023405530480846315\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8523206751054853,\n \"acc_stderr\": 0.0230943295825957,\n \"acc_norm\": 0.8523206751054853,\n \"acc_norm_stderr\": 0.0230943295825957\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7488789237668162,\n \"acc_stderr\": 0.02910522083322462,\n \"acc_norm\": 0.7488789237668162,\n \"acc_norm_stderr\": 0.02910522083322462\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7938931297709924,\n \"acc_stderr\": 0.03547771004159465,\n \"acc_norm\": 0.7938931297709924,\n \"acc_norm_stderr\": 0.03547771004159465\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8760330578512396,\n \"acc_stderr\": 0.030083098716035206,\n \"acc_norm\": 0.8760330578512396,\n \"acc_norm_stderr\": 0.030083098716035206\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8333333333333334,\n \"acc_stderr\": 0.036028141763926456,\n \"acc_norm\": 0.8333333333333334,\n \"acc_norm_stderr\": 0.036028141763926456\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7852760736196319,\n \"acc_stderr\": 0.03226219377286775,\n \"acc_norm\": 0.7852760736196319,\n \"acc_norm_stderr\": 0.03226219377286775\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4732142857142857,\n \"acc_stderr\": 0.047389751192741546,\n \"acc_norm\": 0.4732142857142857,\n \"acc_norm_stderr\": 0.047389751192741546\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8155339805825242,\n \"acc_stderr\": 0.03840423627288276,\n \"acc_norm\": 0.8155339805825242,\n \"acc_norm_stderr\": 0.03840423627288276\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9017094017094017,\n \"acc_stderr\": 0.019503444900757567,\n \"acc_norm\": 0.9017094017094017,\n \"acc_norm_stderr\": 0.019503444900757567\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.859514687100894,\n \"acc_stderr\": 0.012426211353093438,\n 
\"acc_norm\": 0.859514687100894,\n \"acc_norm_stderr\": 0.012426211353093438\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7745664739884393,\n \"acc_stderr\": 0.022497230190967558,\n \"acc_norm\": 0.7745664739884393,\n \"acc_norm_stderr\": 0.022497230190967558\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.5363128491620112,\n \"acc_stderr\": 0.016678341894533162,\n \"acc_norm\": 0.5363128491620112,\n \"acc_norm_stderr\": 0.016678341894533162\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6895424836601307,\n \"acc_stderr\": 0.026493033225145898,\n \"acc_norm\": 0.6895424836601307,\n \"acc_norm_stderr\": 0.026493033225145898\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7395498392282959,\n \"acc_stderr\": 0.02492672322484554,\n \"acc_norm\": 0.7395498392282959,\n \"acc_norm_stderr\": 0.02492672322484554\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7685185185185185,\n \"acc_stderr\": 0.023468429832451152,\n \"acc_norm\": 0.7685185185185185,\n \"acc_norm_stderr\": 0.023468429832451152\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.48936170212765956,\n \"acc_stderr\": 0.029820747191422473,\n \"acc_norm\": 0.48936170212765956,\n \"acc_norm_stderr\": 0.029820747191422473\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5084745762711864,\n \"acc_stderr\": 0.012768401697269048,\n \"acc_norm\": 0.5084745762711864,\n \"acc_norm_stderr\": 0.012768401697269048\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6838235294117647,\n \"acc_stderr\": 0.02824568739146292,\n \"acc_norm\": 0.6838235294117647,\n \"acc_norm_stderr\": 0.02824568739146292\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.7075163398692811,\n \"acc_stderr\": 0.018403415710109793,\n \"acc_norm\": 0.7075163398692811,\n \"acc_norm_stderr\": 0.018403415710109793\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7363636363636363,\n \"acc_stderr\": 0.04220224692971987,\n \"acc_norm\": 0.7363636363636363,\n \"acc_norm_stderr\": 0.04220224692971987\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.763265306122449,\n \"acc_stderr\": 0.027212835884073153,\n \"acc_norm\": 0.763265306122449,\n \"acc_norm_stderr\": 0.027212835884073153\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8557213930348259,\n \"acc_stderr\": 0.024845753212306042,\n \"acc_norm\": 0.8557213930348259,\n \"acc_norm_stderr\": 0.024845753212306042\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.035887028128263686,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.035887028128263686\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5120481927710844,\n \"acc_stderr\": 0.03891364495835817,\n \"acc_norm\": 0.5120481927710844,\n \"acc_norm_stderr\": 0.03891364495835817\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.847953216374269,\n \"acc_stderr\": 0.027539122889061445,\n \"acc_norm\": 0.847953216374269,\n \"acc_norm_stderr\": 0.027539122889061445\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.408812729498164,\n \"mc1_stderr\": 0.01720995215164173,\n \"mc2\": 0.5618014117500216,\n \"mc2_stderr\": 0.015000194909320638\n }\n}\n```", "repo_url": "https://huggingface.co/ddobokki/Llama-2-70b-orca-200k", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", 
"data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|arc:challenge|25_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hellaswag|10_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T20:21:28.711089.parquet", 
"**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T20:21:28.711089.parquet", 
"**/details_harness|hendrycksTest-econometrics|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T20:21:28.711089.parquet", 
"**/details_harness|hendrycksTest-public_relations|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-09T20:21:28.711089.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-09T20:21:28.711089.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_09T20_21_28.711089", "path": ["results_2023-08-09T20:21:28.711089.parquet"]}, {"split": "latest", "path": ["results_2023-08-09T20:21:28.711089.parquet"]}]}]}
2023-08-27T11:31:09+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of ddobokki/Llama-2-70b-orca-200k ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model ddobokki/Llama-2-70b-orca-200k on the Open LLM Leaderboard. The dataset is composed of 61 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-08-09T20:21:28.711089 (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of ddobokki/Llama-2-70b-orca-200k", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ddobokki/Llama-2-70b-orca-200k on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-08-09T20:21:28.711089 (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of ddobokki/Llama-2-70b-orca-200k", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ddobokki/Llama-2-70b-orca-200k on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-08-09T20:21:28.711089 (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 24, 31, 172, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of ddobokki/Llama-2-70b-orca-200k## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model ddobokki/Llama-2-70b-orca-200k on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-08-09T20:21:28.711089 (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
02b2b9ba7145893a1bdc0dec892bff33e1e2e71b
# Dataset Card for Evaluation run of OpenAssistant/llama2-13b-orca-v2-8k-3166 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/OpenAssistant/llama2-13b-orca-v2-8k-3166 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [OpenAssistant/llama2-13b-orca-v2-8k-3166](https://huggingface.co/OpenAssistant/llama2-13b-orca-v2-8k-3166) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_OpenAssistant__llama2-13b-orca-v2-8k-3166", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-09T13:23:40.294595](https://huggingface.co/datasets/open-llm-leaderboard/details_OpenAssistant__llama2-13b-orca-v2-8k-3166/blob/main/results_2023-08-09T13%3A23%3A40.294595.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks.
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5553403891597735, "acc_stderr": 0.03450505851015507, "acc_norm": 0.5593887991541618, "acc_norm_stderr": 0.034487735527209754, "mc1": 0.31701346389228885, "mc1_stderr": 0.016289203374403382, "mc2": 0.4670110907150086, "mc2_stderr": 0.014805030280432917 }, "harness|arc:challenge|25": { "acc": 0.5366894197952219, "acc_stderr": 0.01457200052775699, "acc_norm": 0.5691126279863481, "acc_norm_stderr": 0.01447113339264247 }, "harness|hellaswag|10": { "acc": 0.5955984863572994, "acc_stderr": 0.004897728370737244, "acc_norm": 0.8020314678350926, "acc_norm_stderr": 0.0039765395120785856 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4962962962962963, "acc_stderr": 0.04319223625811331, "acc_norm": 0.4962962962962963, "acc_norm_stderr": 0.04319223625811331 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.618421052631579, "acc_stderr": 0.03953173377749194, "acc_norm": 0.618421052631579, "acc_norm_stderr": 0.03953173377749194 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5849056603773585, "acc_stderr": 0.03032594578928611, "acc_norm": 0.5849056603773585, "acc_norm_stderr": 0.03032594578928611 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5555555555555556, "acc_stderr": 0.04155319955593147, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.04155319955593147 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.4797687861271676, "acc_stderr": 0.03809342081273957, "acc_norm": 0.4797687861271676, "acc_norm_stderr": 0.03809342081273957 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.29411764705882354, "acc_stderr": 0.04533838195929776, "acc_norm": 0.29411764705882354, "acc_norm_stderr": 0.04533838195929776 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.68, "acc_stderr": 0.04688261722621505, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621505 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4127659574468085, "acc_stderr": 0.03218471141400351, "acc_norm": 0.4127659574468085, "acc_norm_stderr": 0.03218471141400351 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.3157894736842105, "acc_stderr": 0.04372748290278007, "acc_norm": 0.3157894736842105, "acc_norm_stderr": 0.04372748290278007 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5241379310344828, "acc_stderr": 0.0416180850350153, "acc_norm": 0.5241379310344828, "acc_norm_stderr": 0.0416180850350153 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.024278568024307695, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.024278568024307695 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3253968253968254, "acc_stderr": 0.041905964388711366, "acc_norm": 
0.3253968253968254, "acc_norm_stderr": 0.041905964388711366 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6387096774193548, "acc_stderr": 0.02732754844795754, "acc_norm": 0.6387096774193548, "acc_norm_stderr": 0.02732754844795754 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.43842364532019706, "acc_stderr": 0.03491207857486518, "acc_norm": 0.43842364532019706, "acc_norm_stderr": 0.03491207857486518 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.53, "acc_stderr": 0.050161355804659205, "acc_norm": 0.53, "acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7090909090909091, "acc_stderr": 0.03546563019624336, "acc_norm": 0.7090909090909091, "acc_norm_stderr": 0.03546563019624336 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.702020202020202, "acc_stderr": 0.03258630383836557, "acc_norm": 0.702020202020202, "acc_norm_stderr": 0.03258630383836557 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7927461139896373, "acc_stderr": 0.029252823291803627, "acc_norm": 0.7927461139896373, "acc_norm_stderr": 0.029252823291803627 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5153846153846153, "acc_stderr": 0.02533900301010651, "acc_norm": 0.5153846153846153, "acc_norm_stderr": 0.02533900301010651 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.35555555555555557, "acc_stderr": 0.02918571494985741, "acc_norm": 0.35555555555555557, "acc_norm_stderr": 0.02918571494985741 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5546218487394958, "acc_stderr": 0.032284106267163895, "acc_norm": 0.5546218487394958, "acc_norm_stderr": 0.032284106267163895 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3576158940397351, "acc_stderr": 0.03913453431177258, "acc_norm": 0.3576158940397351, "acc_norm_stderr": 0.03913453431177258 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7339449541284404, "acc_stderr": 0.018946022322225604, "acc_norm": 0.7339449541284404, "acc_norm_stderr": 0.018946022322225604 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.38425925925925924, "acc_stderr": 0.03317354514310742, "acc_norm": 0.38425925925925924, "acc_norm_stderr": 0.03317354514310742 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7794117647058824, "acc_stderr": 0.02910225438967408, "acc_norm": 0.7794117647058824, "acc_norm_stderr": 0.02910225438967408 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7510548523206751, "acc_stderr": 0.028146970599422644, "acc_norm": 0.7510548523206751, "acc_norm_stderr": 0.028146970599422644 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6502242152466368, "acc_stderr": 0.03200736719484503, "acc_norm": 0.6502242152466368, "acc_norm_stderr": 0.03200736719484503 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6335877862595419, "acc_stderr": 0.04225875451969637, "acc_norm": 0.6335877862595419, "acc_norm_stderr": 0.04225875451969637 }, "harness|hendrycksTest-international_law|5": { "acc": 0.768595041322314, "acc_stderr": 0.03849856098794089, "acc_norm": 0.768595041322314, "acc_norm_stderr": 0.03849856098794089 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6574074074074074, "acc_stderr": 0.045879047413018105, "acc_norm": 0.6574074074074074, 
"acc_norm_stderr": 0.045879047413018105 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6134969325153374, "acc_stderr": 0.03825825548848607, "acc_norm": 0.6134969325153374, "acc_norm_stderr": 0.03825825548848607 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.33035714285714285, "acc_stderr": 0.04464285714285713, "acc_norm": 0.33035714285714285, "acc_norm_stderr": 0.04464285714285713 }, "harness|hendrycksTest-management|5": { "acc": 0.6990291262135923, "acc_stderr": 0.04541609446503948, "acc_norm": 0.6990291262135923, "acc_norm_stderr": 0.04541609446503948 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8076923076923077, "acc_stderr": 0.02581923325648373, "acc_norm": 0.8076923076923077, "acc_norm_stderr": 0.02581923325648373 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.64, "acc_stderr": 0.04824181513244218, "acc_norm": 0.64, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7547892720306514, "acc_stderr": 0.015384352284543941, "acc_norm": 0.7547892720306514, "acc_norm_stderr": 0.015384352284543941 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6358381502890174, "acc_stderr": 0.025906632631016124, "acc_norm": 0.6358381502890174, "acc_norm_stderr": 0.025906632631016124 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.32513966480446926, "acc_stderr": 0.015666542785053566, "acc_norm": 0.32513966480446926, "acc_norm_stderr": 0.015666542785053566 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6339869281045751, "acc_stderr": 0.027582811415159614, "acc_norm": 0.6339869281045751, "acc_norm_stderr": 0.027582811415159614 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6109324758842444, "acc_stderr": 0.027690337536485372, "acc_norm": 0.6109324758842444, "acc_norm_stderr": 0.027690337536485372 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.595679012345679, "acc_stderr": 0.027306625297327688, "acc_norm": 0.595679012345679, "acc_norm_stderr": 0.027306625297327688 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.39361702127659576, "acc_stderr": 0.02914454478159615, "acc_norm": 0.39361702127659576, "acc_norm_stderr": 0.02914454478159615 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.42633637548891784, "acc_stderr": 0.012630884771599696, "acc_norm": 0.42633637548891784, "acc_norm_stderr": 0.012630884771599696 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.4963235294117647, "acc_stderr": 0.030372015885428188, "acc_norm": 0.4963235294117647, "acc_norm_stderr": 0.030372015885428188 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.545751633986928, "acc_stderr": 0.020142974553795205, "acc_norm": 0.545751633986928, "acc_norm_stderr": 0.020142974553795205 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6272727272727273, "acc_stderr": 0.04631381319425465, "acc_norm": 0.6272727272727273, "acc_norm_stderr": 0.04631381319425465 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6938775510204082, "acc_stderr": 0.029504896454595957, "acc_norm": 0.6938775510204082, "acc_norm_stderr": 0.029504896454595957 }, "harness|hendrycksTest-sociology|5": { "acc": 0.746268656716418, "acc_stderr": 0.030769444967296018, "acc_norm": 0.746268656716418, "acc_norm_stderr": 0.030769444967296018 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.8, "acc_stderr": 0.040201512610368466, "acc_norm": 0.8, "acc_norm_stderr": 0.040201512610368466 }, "harness|hendrycksTest-virology|5": { "acc": 0.40963855421686746, "acc_stderr": 0.03828401115079022, 
"acc_norm": 0.40963855421686746, "acc_norm_stderr": 0.03828401115079022 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.783625730994152, "acc_stderr": 0.03158149539338734, "acc_norm": 0.783625730994152, "acc_norm_stderr": 0.03158149539338734 }, "harness|truthfulqa:mc|0": { "mc1": 0.31701346389228885, "mc1_stderr": 0.016289203374403382, "mc2": 0.4670110907150086, "mc2_stderr": 0.014805030280432917 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_OpenAssistant__llama2-13b-orca-v2-8k-3166
[ "region:us" ]
2023-08-18T10:12:00+00:00
{"pretty_name": "Evaluation run of OpenAssistant/llama2-13b-orca-v2-8k-3166", "dataset_summary": "Dataset automatically created during the evaluation run of model [OpenAssistant/llama2-13b-orca-v2-8k-3166](https://huggingface.co/OpenAssistant/llama2-13b-orca-v2-8k-3166) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_OpenAssistant__llama2-13b-orca-v2-8k-3166\",\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-08-09T13:23:40.294595](https://huggingface.co/datasets/open-llm-leaderboard/details_OpenAssistant__llama2-13b-orca-v2-8k-3166/blob/main/results_2023-08-09T13%3A23%3A40.294595.json) (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5553403891597735,\n \"acc_stderr\": 0.03450505851015507,\n \"acc_norm\": 0.5593887991541618,\n \"acc_norm_stderr\": 0.034487735527209754,\n \"mc1\": 0.31701346389228885,\n \"mc1_stderr\": 0.016289203374403382,\n \"mc2\": 0.4670110907150086,\n \"mc2_stderr\": 0.014805030280432917\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5366894197952219,\n \"acc_stderr\": 0.01457200052775699,\n \"acc_norm\": 0.5691126279863481,\n \"acc_norm_stderr\": 0.01447113339264247\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5955984863572994,\n \"acc_stderr\": 0.004897728370737244,\n \"acc_norm\": 0.8020314678350926,\n \"acc_norm_stderr\": 0.0039765395120785856\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4962962962962963,\n \"acc_stderr\": 0.04319223625811331,\n \"acc_norm\": 0.4962962962962963,\n \"acc_norm_stderr\": 0.04319223625811331\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.618421052631579,\n \"acc_stderr\": 0.03953173377749194,\n \"acc_norm\": 0.618421052631579,\n \"acc_norm_stderr\": 0.03953173377749194\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5849056603773585,\n \"acc_stderr\": 0.03032594578928611,\n \"acc_norm\": 0.5849056603773585,\n \"acc_norm_stderr\": 0.03032594578928611\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5555555555555556,\n \"acc_stderr\": 0.04155319955593147,\n \"acc_norm\": 0.5555555555555556,\n \"acc_norm_stderr\": 0.04155319955593147\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n 
\"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.4797687861271676,\n \"acc_stderr\": 0.03809342081273957,\n \"acc_norm\": 0.4797687861271676,\n \"acc_norm_stderr\": 0.03809342081273957\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.29411764705882354,\n \"acc_stderr\": 0.04533838195929776,\n \"acc_norm\": 0.29411764705882354,\n \"acc_norm_stderr\": 0.04533838195929776\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.4127659574468085,\n \"acc_stderr\": 0.03218471141400351,\n \"acc_norm\": 0.4127659574468085,\n \"acc_norm_stderr\": 0.03218471141400351\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.3157894736842105,\n \"acc_stderr\": 0.04372748290278007,\n \"acc_norm\": 0.3157894736842105,\n \"acc_norm_stderr\": 0.04372748290278007\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5241379310344828,\n \"acc_stderr\": 0.0416180850350153,\n \"acc_norm\": 0.5241379310344828,\n \"acc_norm_stderr\": 0.0416180850350153\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.024278568024307695,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.024278568024307695\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3253968253968254,\n \"acc_stderr\": 0.041905964388711366,\n \"acc_norm\": 0.3253968253968254,\n \"acc_norm_stderr\": 0.041905964388711366\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145632,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145632\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6387096774193548,\n \"acc_stderr\": 0.02732754844795754,\n \"acc_norm\": 0.6387096774193548,\n \"acc_norm_stderr\": 0.02732754844795754\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.43842364532019706,\n \"acc_stderr\": 0.03491207857486518,\n \"acc_norm\": 0.43842364532019706,\n \"acc_norm_stderr\": 0.03491207857486518\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7090909090909091,\n \"acc_stderr\": 0.03546563019624336,\n \"acc_norm\": 0.7090909090909091,\n \"acc_norm_stderr\": 0.03546563019624336\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.702020202020202,\n \"acc_stderr\": 0.03258630383836557,\n \"acc_norm\": 0.702020202020202,\n \"acc_norm_stderr\": 0.03258630383836557\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.7927461139896373,\n \"acc_stderr\": 0.029252823291803627,\n \"acc_norm\": 0.7927461139896373,\n \"acc_norm_stderr\": 0.029252823291803627\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5153846153846153,\n \"acc_stderr\": 0.02533900301010651,\n \"acc_norm\": 0.5153846153846153,\n \"acc_norm_stderr\": 0.02533900301010651\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.35555555555555557,\n \"acc_stderr\": 0.02918571494985741,\n \"acc_norm\": 0.35555555555555557,\n \"acc_norm_stderr\": 0.02918571494985741\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.5546218487394958,\n \"acc_stderr\": 0.032284106267163895,\n \"acc_norm\": 0.5546218487394958,\n \"acc_norm_stderr\": 0.032284106267163895\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3576158940397351,\n \"acc_stderr\": 0.03913453431177258,\n \"acc_norm\": 0.3576158940397351,\n \"acc_norm_stderr\": 0.03913453431177258\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7339449541284404,\n \"acc_stderr\": 0.018946022322225604,\n \"acc_norm\": 0.7339449541284404,\n \"acc_norm_stderr\": 0.018946022322225604\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.38425925925925924,\n \"acc_stderr\": 0.03317354514310742,\n \"acc_norm\": 0.38425925925925924,\n \"acc_norm_stderr\": 0.03317354514310742\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7794117647058824,\n \"acc_stderr\": 0.02910225438967408,\n \"acc_norm\": 0.7794117647058824,\n \"acc_norm_stderr\": 0.02910225438967408\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7510548523206751,\n \"acc_stderr\": 0.028146970599422644,\n \"acc_norm\": 0.7510548523206751,\n \"acc_norm_stderr\": 0.028146970599422644\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6502242152466368,\n \"acc_stderr\": 0.03200736719484503,\n \"acc_norm\": 0.6502242152466368,\n \"acc_norm_stderr\": 0.03200736719484503\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6335877862595419,\n \"acc_stderr\": 0.04225875451969637,\n \"acc_norm\": 0.6335877862595419,\n \"acc_norm_stderr\": 0.04225875451969637\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.768595041322314,\n \"acc_stderr\": 0.03849856098794089,\n \"acc_norm\": 0.768595041322314,\n \"acc_norm_stderr\": 0.03849856098794089\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6574074074074074,\n \"acc_stderr\": 0.045879047413018105,\n \"acc_norm\": 0.6574074074074074,\n \"acc_norm_stderr\": 0.045879047413018105\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6134969325153374,\n \"acc_stderr\": 0.03825825548848607,\n \"acc_norm\": 0.6134969325153374,\n \"acc_norm_stderr\": 0.03825825548848607\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.33035714285714285,\n \"acc_stderr\": 0.04464285714285713,\n \"acc_norm\": 0.33035714285714285,\n \"acc_norm_stderr\": 0.04464285714285713\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.6990291262135923,\n \"acc_stderr\": 0.04541609446503948,\n \"acc_norm\": 0.6990291262135923,\n \"acc_norm_stderr\": 0.04541609446503948\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8076923076923077,\n \"acc_stderr\": 0.02581923325648373,\n \"acc_norm\": 0.8076923076923077,\n \"acc_norm_stderr\": 0.02581923325648373\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.7547892720306514,\n \"acc_stderr\": 0.015384352284543941,\n \"acc_norm\": 0.7547892720306514,\n \"acc_norm_stderr\": 0.015384352284543941\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6358381502890174,\n \"acc_stderr\": 0.025906632631016124,\n \"acc_norm\": 0.6358381502890174,\n \"acc_norm_stderr\": 0.025906632631016124\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.32513966480446926,\n \"acc_stderr\": 0.015666542785053566,\n \"acc_norm\": 0.32513966480446926,\n \"acc_norm_stderr\": 0.015666542785053566\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6339869281045751,\n \"acc_stderr\": 0.027582811415159614,\n \"acc_norm\": 0.6339869281045751,\n \"acc_norm_stderr\": 0.027582811415159614\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6109324758842444,\n \"acc_stderr\": 0.027690337536485372,\n \"acc_norm\": 0.6109324758842444,\n \"acc_norm_stderr\": 0.027690337536485372\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.595679012345679,\n \"acc_stderr\": 0.027306625297327688,\n \"acc_norm\": 0.595679012345679,\n \"acc_norm_stderr\": 0.027306625297327688\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.39361702127659576,\n \"acc_stderr\": 0.02914454478159615,\n \"acc_norm\": 0.39361702127659576,\n \"acc_norm_stderr\": 0.02914454478159615\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.42633637548891784,\n \"acc_stderr\": 0.012630884771599696,\n \"acc_norm\": 0.42633637548891784,\n \"acc_norm_stderr\": 0.012630884771599696\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.4963235294117647,\n \"acc_stderr\": 0.030372015885428188,\n \"acc_norm\": 0.4963235294117647,\n \"acc_norm_stderr\": 0.030372015885428188\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.545751633986928,\n \"acc_stderr\": 0.020142974553795205,\n \"acc_norm\": 0.545751633986928,\n \"acc_norm_stderr\": 0.020142974553795205\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6272727272727273,\n \"acc_stderr\": 0.04631381319425465,\n \"acc_norm\": 0.6272727272727273,\n \"acc_norm_stderr\": 0.04631381319425465\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6938775510204082,\n \"acc_stderr\": 0.029504896454595957,\n \"acc_norm\": 0.6938775510204082,\n \"acc_norm_stderr\": 0.029504896454595957\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.746268656716418,\n \"acc_stderr\": 0.030769444967296018,\n \"acc_norm\": 0.746268656716418,\n \"acc_norm_stderr\": 0.030769444967296018\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.040201512610368466,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.040201512610368466\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.40963855421686746,\n \"acc_stderr\": 0.03828401115079022,\n \"acc_norm\": 0.40963855421686746,\n \"acc_norm_stderr\": 0.03828401115079022\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.783625730994152,\n \"acc_stderr\": 0.03158149539338734,\n \"acc_norm\": 0.783625730994152,\n \"acc_norm_stderr\": 0.03158149539338734\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.31701346389228885,\n \"mc1_stderr\": 0.016289203374403382,\n \"mc2\": 0.4670110907150086,\n \"mc2_stderr\": 0.014805030280432917\n }\n}\n```", "repo_url": "https://huggingface.co/OpenAssistant/llama2-13b-orca-v2-8k-3166", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": 
"[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|arc:challenge|25_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hellaswag|10_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T13:23:40.294595.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-09T13:23:40.294595.parquet", 
"**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T13:23:40.294595.parquet", 
"**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-09T13:23:40.294595.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-09T13:23:40.294595.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_09T13_23_40.294595", "path": ["results_2023-08-09T13:23:40.294595.parquet"]}, {"split": "latest", "path": ["results_2023-08-09T13:23:40.294595.parquet"]}]}]}
2023-08-27T11:31:11+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of OpenAssistant/llama2-13b-orca-v2-8k-3166 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model OpenAssistant/llama2-13b-orca-v2-8k-3166 on the Open LLM Leaderboard. The dataset is composed of 61 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-08-09T13:23:40.294595 (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of OpenAssistant/llama2-13b-orca-v2-8k-3166", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model OpenAssistant/llama2-13b-orca-v2-8k-3166 on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-08-09T13:23:40.294595 (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of OpenAssistant/llama2-13b-orca-v2-8k-3166", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model OpenAssistant/llama2-13b-orca-v2-8k-3166 on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-08-09T13:23:40.294595 (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 29, 31, 177, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of OpenAssistant/llama2-13b-orca-v2-8k-3166## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model OpenAssistant/llama2-13b-orca-v2-8k-3166 on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-08-09T13:23:40.294595 (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
e0b567044caaa6ffd2243bd2106082a37a4535ee
# Dataset Card for Evaluation run of OpenAssistant/oasst-sft-1-pythia-12b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/OpenAssistant/oasst-sft-1-pythia-12b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [OpenAssistant/oasst-sft-1-pythia-12b](https://huggingface.co/OpenAssistant/oasst-sft-1-pythia-12b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_OpenAssistant__oasst-sft-1-pythia-12b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-22T03:38:38.139466](https://huggingface.co/datasets/open-llm-leaderboard/details_OpenAssistant__oasst-sft-1-pythia-12b/blob/main/results_2023-10-22T03-38-38.139466.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.007340604026845637, "em_stderr": 0.000874189687534619, "f1": 0.06295302013422834, "f1_stderr": 0.0015822681875736675, "acc": 0.3140033976135325, "acc_stderr": 0.007883417880991842 }, "harness|drop|3": { "em": 0.007340604026845637, "em_stderr": 0.000874189687534619, "f1": 0.06295302013422834, "f1_stderr": 0.0015822681875736675 }, "harness|gsm8k|5": { "acc": 0.006065200909780136, "acc_stderr": 0.002138670301460446 }, "harness|winogrande|5": { "acc": 0.6219415943172849, "acc_stderr": 0.013628165460523237 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
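As a complementary, non-authoritative sketch of the same loading pattern, one of the per-task configs listed for this repo (for example `harness_gsm8k_5`) can be pulled with its "latest" split (assuming the `datasets` library and network access):

```python
from datasets import load_dataset

# Minimal sketch: the config name and the "latest" split come from this card's
# configuration list; swap in any other listed config to inspect its details.
gsm8k_details = load_dataset(
    "open-llm-leaderboard/details_OpenAssistant__oasst-sft-1-pythia-12b",
    "harness_gsm8k_5",
    split="latest",
)
print(gsm8k_details)
```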
open-llm-leaderboard/details_OpenAssistant__oasst-sft-1-pythia-12b
[ "region:us" ]
2023-08-18T10:12:09+00:00
{"pretty_name": "Evaluation run of OpenAssistant/oasst-sft-1-pythia-12b", "dataset_summary": "Dataset automatically created during the evaluation run of model [OpenAssistant/oasst-sft-1-pythia-12b](https://huggingface.co/OpenAssistant/oasst-sft-1-pythia-12b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_OpenAssistant__oasst-sft-1-pythia-12b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-22T03:38:38.139466](https://huggingface.co/datasets/open-llm-leaderboard/details_OpenAssistant__oasst-sft-1-pythia-12b/blob/main/results_2023-10-22T03-38-38.139466.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.007340604026845637,\n \"em_stderr\": 0.000874189687534619,\n \"f1\": 0.06295302013422834,\n \"f1_stderr\": 0.0015822681875736675,\n \"acc\": 0.3140033976135325,\n \"acc_stderr\": 0.007883417880991842\n },\n \"harness|drop|3\": {\n \"em\": 0.007340604026845637,\n \"em_stderr\": 0.000874189687534619,\n \"f1\": 0.06295302013422834,\n \"f1_stderr\": 0.0015822681875736675\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.006065200909780136,\n \"acc_stderr\": 0.002138670301460446\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6219415943172849,\n \"acc_stderr\": 0.013628165460523237\n }\n}\n```", "repo_url": "https://huggingface.co/OpenAssistant/oasst-sft-1-pythia-12b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|arc:challenge|25_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_22T03_38_38.139466", "path": ["**/details_harness|drop|3_2023-10-22T03-38-38.139466.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-22T03-38-38.139466.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_22T03_38_38.139466", "path": ["**/details_harness|gsm8k|5_2023-10-22T03-38-38.139466.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-22T03-38-38.139466.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hellaswag|10_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hellaswag|10_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:16:49.631586.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:16:49.631586.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:16:49.631586.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T18:16:49.631586.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": 
"2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T18:16:49.631586.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T18:16:49.631586.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_22T03_38_38.139466", "path": ["**/details_harness|winogrande|5_2023-10-22T03-38-38.139466.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-22T03-38-38.139466.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T18_16_49.631586", "path": ["results_2023-07-19T18:16:49.631586.parquet"]}, {"split": "2023_10_22T03_38_38.139466", "path": ["results_2023-10-22T03-38-38.139466.parquet"]}, {"split": "latest", "path": ["results_2023-10-22T03-38-38.139466.parquet"]}]}]}
2023-10-22T02:38:50+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of OpenAssistant/oasst-sft-1-pythia-12b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model OpenAssistant/oasst-sft-1-pythia-12b on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-22T03:38:38.139466 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of OpenAssistant/oasst-sft-1-pythia-12b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model OpenAssistant/oasst-sft-1-pythia-12b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-22T03:38:38.139466(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of OpenAssistant/oasst-sft-1-pythia-12b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model OpenAssistant/oasst-sft-1-pythia-12b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-22T03:38:38.139466(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 26, 31, 174, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of OpenAssistant/oasst-sft-1-pythia-12b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model OpenAssistant/oasst-sft-1-pythia-12b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-22T03:38:38.139466(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
66c813caa4200bea95a836a9a118dff1e530cc37
# Dataset Card for "RLHF_data" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
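The dataset metadata recorded below lists `prompt`, `chosen` and `rejected` string columns split into train and test sets. As a hedged illustration only (the column names, split names and row counts are taken from that metadata and not otherwise documented here), the data could be inspected as follows:

```python
from datasets import load_dataset

# Expected splits per the metadata: train (~18,876 rows) and test (~4,720 rows).
data = load_dataset("LawChat-tw/RLHF_data")
print(data)

# Preview one preference triple; each field is assumed to be a plain string.
example = data["train"][0]
for column in ("prompt", "chosen", "rejected"):
    print(column, "->", example[column][:80])
```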
LawChat-tw/RLHF_data
[ "region:us" ]
2023-08-18T10:12:11+00:00
{"dataset_info": {"features": [{"name": "prompt", "dtype": "string"}, {"name": "chosen", "dtype": "string"}, {"name": "rejected", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 22322471.532124087, "num_examples": 18876}, {"name": "test", "num_bytes": 5581800.467875911, "num_examples": 4720}], "download_size": 18063980, "dataset_size": 27904272.0}}
2023-08-18T11:40:27+00:00
[]
[]
TAGS #region-us
# Dataset Card for "RLHF_data" More Information needed
[ "# Dataset Card for \"RLHF_data\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"RLHF_data\"\n\nMore Information needed" ]
[ 6, 14 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"RLHF_data\"\n\nMore Information needed" ]
eb6a37eda484a5583b215855b3c30b551fc14b29
# Dataset Card for Evaluation run of OpenAssistant/stablelm-7b-sft-v7-epoch-3

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/OpenAssistant/stablelm-7b-sft-v7-epoch-3
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [OpenAssistant/stablelm-7b-sft-v7-epoch-3](https://huggingface.co/OpenAssistant/stablelm-7b-sft-v7-epoch-3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_OpenAssistant__stablelm-7b-sft-v7-epoch-3",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-13T03:23:25.661445](https://huggingface.co/datasets/open-llm-leaderboard/details_OpenAssistant__stablelm-7b-sft-v7-epoch-3/blob/main/results_2023-10-13T03-23-25.661445.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.05578859060402685,
        "em_stderr": 0.0023504280872280073,
        "f1": 0.10613569630872476,
        "f1_stderr": 0.0026144580255279513,
        "acc": 0.27616530425036784,
        "acc_stderr": 0.007839405520583978
    },
    "harness|drop|3": {
        "em": 0.05578859060402685,
        "em_stderr": 0.0023504280872280073,
        "f1": 0.10613569630872476,
        "f1_stderr": 0.0026144580255279513
    },
    "harness|gsm8k|5": {
        "acc": 0.0037907505686125853,
        "acc_stderr": 0.0016927007401501943
    },
    "harness|winogrande|5": {
        "acc": 0.5485398579321231,
        "acc_stderr": 0.01398611030101776
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
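Since this dataset has been built from several runs, each configuration exposes one timestamped split per run in addition to the "latest" alias. The following is a minimal, hedged sketch for discovering those configurations and splits programmatically; it is not part of the original card and assumes only the `datasets` library and the naming scheme described above.

```python
from datasets import get_dataset_config_names, get_dataset_split_names, load_dataset

repo = "open-llm-leaderboard/details_OpenAssistant__stablelm-7b-sft-v7-epoch-3"

# Enumerate the per-task configurations exposed by this details repository.
configs = get_dataset_config_names(repo)
print(len(configs), "configurations, for example:", configs[:5])

# Each configuration has one split per run timestamp plus a "latest" alias.
print(get_dataset_split_names(repo, "harness_arc_challenge_25"))

# Load the latest run of one task for inspection.
details = load_dataset(repo, "harness_arc_challenge_25", split="latest")
print(details.num_rows, "examples in the latest run")
```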
open-llm-leaderboard/details_OpenAssistant__stablelm-7b-sft-v7-epoch-3
[ "region:us" ]
2023-08-18T10:12:17+00:00
{"pretty_name": "Evaluation run of OpenAssistant/stablelm-7b-sft-v7-epoch-3", "dataset_summary": "Dataset automatically created during the evaluation run of model [OpenAssistant/stablelm-7b-sft-v7-epoch-3](https://huggingface.co/OpenAssistant/stablelm-7b-sft-v7-epoch-3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_OpenAssistant__stablelm-7b-sft-v7-epoch-3\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-13T03:23:25.661445](https://huggingface.co/datasets/open-llm-leaderboard/details_OpenAssistant__stablelm-7b-sft-v7-epoch-3/blob/main/results_2023-10-13T03-23-25.661445.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.05578859060402685,\n \"em_stderr\": 0.0023504280872280073,\n \"f1\": 0.10613569630872476,\n \"f1_stderr\": 0.0026144580255279513,\n \"acc\": 0.27616530425036784,\n \"acc_stderr\": 0.007839405520583978\n },\n \"harness|drop|3\": {\n \"em\": 0.05578859060402685,\n \"em_stderr\": 0.0023504280872280073,\n \"f1\": 0.10613569630872476,\n \"f1_stderr\": 0.0026144580255279513\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0037907505686125853,\n \"acc_stderr\": 0.0016927007401501943\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5485398579321231,\n \"acc_stderr\": 0.01398611030101776\n }\n}\n```", "repo_url": "https://huggingface.co/OpenAssistant/stablelm-7b-sft-v7-epoch-3", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|arc:challenge|25_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|arc:challenge|25_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_13T03_23_25.661445", "path": ["**/details_harness|drop|3_2023-10-13T03-23-25.661445.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-13T03-23-25.661445.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_13T03_23_25.661445", "path": ["**/details_harness|gsm8k|5_2023-10-13T03-23-25.661445.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-13T03-23-25.661445.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": 
["**/details_harness|hellaswag|10_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hellaswag|10_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T17:06:42.731727.parquet", 
"**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T17:06:42.731727.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T17:07:54.588127.parquet", 
"**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T17:07:54.588127.parquet", 
"**/details_harness|hendrycksTest-sociology|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T17:07:54.588127.parquet", 
"**/details_harness|hendrycksTest-international_law|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T17:07:54.588127.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": 
["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": 
["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": 
["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T17:07:54.588127.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": 
["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-sociology|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T17:07:54.588127.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T17:07:54.588127.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_13T03_23_25.661445", "path": ["**/details_harness|winogrande|5_2023-10-13T03-23-25.661445.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-13T03-23-25.661445.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T17_06_42.731727", "path": ["results_2023-07-19T17:06:42.731727.parquet"]}, {"split": "2023_07_19T17_07_54.588127", "path": ["results_2023-07-19T17:07:54.588127.parquet"]}, {"split": "2023_10_13T03_23_25.661445", "path": ["results_2023-10-13T03-23-25.661445.parquet"]}, {"split": "latest", "path": ["results_2023-10-13T03-23-25.661445.parquet"]}]}]}
2023-10-13T02:23:37+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of OpenAssistant/stablelm-7b-sft-v7-epoch-3 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model OpenAssistant/stablelm-7b-sft-v7-epoch-3 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-13T03:23:25.661445 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of OpenAssistant/stablelm-7b-sft-v7-epoch-3", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model OpenAssistant/stablelm-7b-sft-v7-epoch-3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-13T03:23:25.661445(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of OpenAssistant/stablelm-7b-sft-v7-epoch-3", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model OpenAssistant/stablelm-7b-sft-v7-epoch-3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-13T03:23:25.661445(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 28, 31, 176, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of OpenAssistant/stablelm-7b-sft-v7-epoch-3## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model OpenAssistant/stablelm-7b-sft-v7-epoch-3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-13T03:23:25.661445(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
2796accb90526f9cd31e5ea63ebe967d930ee486
# Dataset Card for Evaluation run of OpenAssistant/llama2-13b-orca-8k-3319 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/OpenAssistant/llama2-13b-orca-8k-3319 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [OpenAssistant/llama2-13b-orca-8k-3319](https://huggingface.co/OpenAssistant/llama2-13b-orca-8k-3319) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_OpenAssistant__llama2-13b-orca-8k-3319", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-19T09:37:05.639025](https://huggingface.co/datasets/open-llm-leaderboard/details_OpenAssistant__llama2-13b-orca-8k-3319/blob/main/results_2023-10-19T09-37-05.639025.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.07235738255033557, "em_stderr": 0.002653208755575334, "f1": 0.1714293204697988, "f1_stderr": 0.0030613909144533535, "acc": 0.44091694875395904, "acc_stderr": 0.010204605702764508 }, "harness|drop|3": { "em": 0.07235738255033557, "em_stderr": 0.002653208755575334, "f1": 0.1714293204697988, "f1_stderr": 0.0030613909144533535 }, "harness|gsm8k|5": { "acc": 0.10993176648976498, "acc_stderr": 0.008616195587865418 }, "harness|winogrande|5": { "acc": 0.7719021310181531, "acc_stderr": 0.011793015817663597 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_OpenAssistant__llama2-13b-orca-8k-3319
[ "region:us" ]
2023-08-18T10:12:27+00:00
{"pretty_name": "Evaluation run of OpenAssistant/llama2-13b-orca-8k-3319", "dataset_summary": "Dataset automatically created during the evaluation run of model [OpenAssistant/llama2-13b-orca-8k-3319](https://huggingface.co/OpenAssistant/llama2-13b-orca-8k-3319) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_OpenAssistant__llama2-13b-orca-8k-3319\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-19T09:37:05.639025](https://huggingface.co/datasets/open-llm-leaderboard/details_OpenAssistant__llama2-13b-orca-8k-3319/blob/main/results_2023-10-19T09-37-05.639025.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.07235738255033557,\n \"em_stderr\": 0.002653208755575334,\n \"f1\": 0.1714293204697988,\n \"f1_stderr\": 0.0030613909144533535,\n \"acc\": 0.44091694875395904,\n \"acc_stderr\": 0.010204605702764508\n },\n \"harness|drop|3\": {\n \"em\": 0.07235738255033557,\n \"em_stderr\": 0.002653208755575334,\n \"f1\": 0.1714293204697988,\n \"f1_stderr\": 0.0030613909144533535\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.10993176648976498,\n \"acc_stderr\": 0.008616195587865418\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7719021310181531,\n \"acc_stderr\": 0.011793015817663597\n }\n}\n```", "repo_url": "https://huggingface.co/OpenAssistant/llama2-13b-orca-8k-3319", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|arc:challenge|25_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_19T09_37_05.639025", "path": ["**/details_harness|drop|3_2023-10-19T09-37-05.639025.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-19T09-37-05.639025.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_19T09_37_05.639025", "path": ["**/details_harness|gsm8k|5_2023-10-19T09-37-05.639025.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-19T09-37-05.639025.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hellaswag|10_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hellaswag|10_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-25T11:12:31.858304.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-25T11:12:31.858304.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-25T11:12:31.858304.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-25T11:12:31.858304.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": 
"2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-25T11:12:31.858304.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-25T11:12:31.858304.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_19T09_37_05.639025", "path": ["**/details_harness|winogrande|5_2023-10-19T09-37-05.639025.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-19T09-37-05.639025.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_25T11_12_31.858304", "path": ["results_2023-07-25T11:12:31.858304.parquet"]}, {"split": "2023_10_19T09_37_05.639025", "path": ["results_2023-10-19T09-37-05.639025.parquet"]}, {"split": "latest", "path": ["results_2023-10-19T09-37-05.639025.parquet"]}]}]}
2023-10-19T08:37:18+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of OpenAssistant/llama2-13b-orca-8k-3319 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model OpenAssistant/llama2-13b-orca-8k-3319 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-19T09:37:05.639025 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
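The summary above ends with "To load the details from a run, you can for instance do the following:" but the snippet itself is not included in this flattened text. Below is a minimal sketch, assuming the details repository follows the usual open-llm-leaderboard naming pattern for this model and using the "harness_winogrande_5" configuration and "latest" split listed in the metadata above; adjust the names if the actual repository differs.

```python
# Sketch only: the repository id is assumed from the naming pattern used by
# other open-llm-leaderboard details datasets; the config and split names are
# taken from this record's metadata.
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_OpenAssistant__llama2-13b-orca-8k-3319",  # assumed repo id
    "harness_winogrande_5",  # one of the per-task configurations
    split="latest",          # "latest" mirrors the most recent run for this config
)
print(data)
```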
[ "# Dataset Card for Evaluation run of OpenAssistant/llama2-13b-orca-8k-3319", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model OpenAssistant/llama2-13b-orca-8k-3319 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-19T09:37:05.639025(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of OpenAssistant/llama2-13b-orca-8k-3319", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model OpenAssistant/llama2-13b-orca-8k-3319 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-19T09:37:05.639025(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 26, 31, 174, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of OpenAssistant/llama2-13b-orca-8k-3319## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model OpenAssistant/llama2-13b-orca-8k-3319 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-19T09:37:05.639025(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
28173f1bd8b62767cc87f7fb46ecd81561292100
# Dataset Card for Evaluation run of OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5](https://huggingface.co/OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_OpenAssistant__oasst-sft-4-pythia-12b-epoch-3.5", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-22T03:00:35.046242](https://huggingface.co/datasets/open-llm-leaderboard/details_OpenAssistant__oasst-sft-4-pythia-12b-epoch-3.5/blob/main/results_2023-10-22T03-00-35.046242.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval): ```python { "all": { "em": 0.001363255033557047, "em_stderr": 0.00037786091964606887, "f1": 0.059077181208053976, "f1_stderr": 0.001394848925611238, "acc": 0.3446815500250423, "acc_stderr": 0.009023084450724785 }, "harness|drop|3": { "em": 0.001363255033557047, "em_stderr": 0.00037786091964606887, "f1": 0.059077181208053976, "f1_stderr": 0.001394848925611238 }, "harness|gsm8k|5": { "acc": 0.030326004548900682, "acc_stderr": 0.004723487465514761 }, "harness|winogrande|5": { "acc": 0.659037095501184, "acc_stderr": 0.013322681435934807 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
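The snippet in the card loads one per-task configuration. As a complementary sketch (not part of the original card), the aggregated "results" configuration described in the summary can be loaded the same way; the config name "results" and the "latest" split follow the conventions stated above and the split names used elsewhere in this repository's metadata, so adjust them if the repository differs.

```python
# Sketch only: loads the aggregated "results" configuration described in the
# dataset summary; the config name and the "latest" split follow the card's
# stated conventions rather than anything verified here.
from datasets import load_dataset

results = load_dataset(
    "open-llm-leaderboard/details_OpenAssistant__oasst-sft-4-pythia-12b-epoch-3.5",
    "results",
    split="latest",
)
print(results[0])  # inspect the first record of the aggregated results
```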
open-llm-leaderboard/details_OpenAssistant__oasst-sft-4-pythia-12b-epoch-3.5
[ "region:us" ]
2023-08-18T10:12:36+00:00
{"pretty_name": "Evaluation run of OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5", "dataset_summary": "Dataset automatically created during the evaluation run of model [OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5](https://huggingface.co/OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_OpenAssistant__oasst-sft-4-pythia-12b-epoch-3.5\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-22T03:00:35.046242](https://huggingface.co/datasets/open-llm-leaderboard/details_OpenAssistant__oasst-sft-4-pythia-12b-epoch-3.5/blob/main/results_2023-10-22T03-00-35.046242.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.001363255033557047,\n \"em_stderr\": 0.00037786091964606887,\n \"f1\": 0.059077181208053976,\n \"f1_stderr\": 0.001394848925611238,\n \"acc\": 0.3446815500250423,\n \"acc_stderr\": 0.009023084450724785\n },\n \"harness|drop|3\": {\n \"em\": 0.001363255033557047,\n \"em_stderr\": 0.00037786091964606887,\n \"f1\": 0.059077181208053976,\n \"f1_stderr\": 0.001394848925611238\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.030326004548900682,\n \"acc_stderr\": 0.004723487465514761\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.659037095501184,\n \"acc_stderr\": 0.013322681435934807\n }\n}\n```", "repo_url": "https://huggingface.co/OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|arc:challenge|25_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_22T03_00_35.046242", "path": ["**/details_harness|drop|3_2023-10-22T03-00-35.046242.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-22T03-00-35.046242.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_22T03_00_35.046242", "path": ["**/details_harness|gsm8k|5_2023-10-22T03-00-35.046242.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-22T03-00-35.046242.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hellaswag|10_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hellaswag|10_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:18:17.138849.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:18:17.138849.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-management|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:18:17.138849.parquet", "**/details_harness|hendrycksTest-virology|5_2023-07-19T18:18:17.138849.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": 
"2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T18:18:17.138849.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-07-19T18:18:17.138849.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_22T03_00_35.046242", "path": ["**/details_harness|winogrande|5_2023-10-22T03-00-35.046242.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-22T03-00-35.046242.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_07_19T18_18_17.138849", "path": ["results_2023-07-19T18:18:17.138849.parquet"]}, {"split": "2023_10_22T03_00_35.046242", "path": ["results_2023-10-22T03-00-35.046242.parquet"]}, {"split": "latest", "path": ["results_2023-10-22T03-00-35.046242.parquet"]}]}]}
2023-10-22T02:00:47+00:00
[]
[]
TAGS
#region-us
# Dataset Card for Evaluation run of OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5

## Dataset Description

- Homepage:
- Repository: URL
- Paper:
- Leaderboard: URL
- Point of Contact: clementine@URL

### Dataset Summary

Dataset automatically created during the evaluation run of model OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5 on the Open LLM Leaderboard.

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 runs. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the most recent results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following (see the example sketch after this outline):

## Latest results

These are the latest results from run 2023-10-22T03:00:35.046242 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the "results" configuration and in the "latest" split of each eval):

### Supported Tasks and Leaderboards

### Languages

## Dataset Structure

### Data Instances

### Data Fields

### Data Splits

## Dataset Creation

### Curation Rationale

### Source Data

#### Initial Data Collection and Normalization

#### Who are the source language producers?

### Annotations

#### Annotation process

#### Who are the annotators?

### Personal and Sensitive Information

## Considerations for Using the Data

### Social Impact of Dataset

### Discussion of Biases

### Other Known Limitations

## Additional Information

### Dataset Curators

### Licensing Information

### Contributions
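A minimal sketch of the loading step referenced under "Dataset Summary", using the `datasets` library. The per-task config names (e.g. "harness_winogrande_5"), the "results" config, and the "latest" split are taken from this card's metadata; the repository id below follows the usual Open LLM Leaderboard naming scheme and is an assumption, since the card itself only shows URL placeholders.

```python
# Sketch only. The repo id is assumed from the usual Open LLM Leaderboard
# naming convention; the config names and the "latest" split come from the
# data_files listed in this card's metadata.
from datasets import load_dataset

REPO_ID = "open-llm-leaderboard/details_OpenAssistant__oasst-sft-4-pythia-12b-epoch-3.5"

# Per-task details: one configuration per evaluated task; the "latest"
# split points at the most recent run (2023-10-22 for Winogrande here).
winogrande = load_dataset(REPO_ID, "harness_winogrande_5", split="latest")
print(winogrande[0])

# Aggregated metrics for each run live in the "results" configuration.
results = load_dataset(REPO_ID, "results", split="latest")
print(results[0])
```

A timestamped split name (e.g. "2023_10_22T03_00_35.046242") can be passed instead of "latest" when the details of one specific run are needed.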
[ "# Dataset Card for Evaluation run of OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-22T03:00:35.046242(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-22T03:00:35.046242(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 31, 31, 179, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-22T03:00:35.046242(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]