sha | text | id | tags | created_at | metadata | last_modified | arxiv | languages | tags_str | text_str | text_lists | processed_texts | tokens_length | input_texts |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
505f6a0f0abe2ab64e3d977a203507716385b9a9 | # Dataset Card for "bw_spec_cls_4_14_s_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_14_s_200 | [
"region:us"
]
| 2023-11-12T09:58:04+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "1197", "1": "1270", "2": "1276", "3": "1277"}}}}], "splits": [{"name": "train", "num_bytes": 43731623.0, "num_examples": 800}, {"name": "test", "num_bytes": 1102972.0, "num_examples": 20}], "download_size": 37991761, "dataset_size": 44834595.0}} | 2023-11-12T09:58:08+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_14_s_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_14_s_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_14_s_200\"\n\nMore Information needed"
]
| [
6,
24
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_14_s_200\"\n\nMore Information needed"
]
|
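A note on reading these rows: the metadata column is the card's YAML front matter serialized as JSON, and its configs block is what the datasets library resolves when loading the repo. As a minimal sketch (assuming the repo is public and a recent `datasets` release is installed), the card above maps to:

```python
from datasets import load_dataset

# Load the image-classification dataset described in the card above.
# Its "default" config maps the train/test splits to parquet shards under data/.
ds = load_dataset("arieg/bw_spec_cls_4_14_s_200")

print(ds)               # DatasetDict: train (800 examples), test (20 examples)
sample = ds["train"][0]
print(sample["label"])  # int index into the class names {"0": "1197", ..., "3": "1277"}
```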
0197b869f4147aa69859827829f7cbf0d2d2f934 | # Dataset Card for "robot300"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | allen0523/robot300 | [
"region:us"
]
| 2023-11-12T09:59:56+00:00 | {"dataset_info": {"features": [{"name": "image", "dtype": "image"}], "splits": [{"name": "train", "num_bytes": 240903241.0, "num_examples": 300}], "download_size": 240917130, "dataset_size": 240903241.0}} | 2023-11-12T10:02:41+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "robot300"
More Information needed | [
"# Dataset Card for \"robot300\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"robot300\"\n\nMore Information needed"
]
| [
6,
12
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"robot300\"\n\nMore Information needed"
]
|
212b844a8b34a4251f5d44ab2af4e9aa1c2eb44a | # Dataset Card for "bw_spec_cls_4_15_s_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_15_s_200 | [
"region:us"
]
| 2023-11-12T10:04:56+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "1278", "1": "1417", "2": "1427", "3": "1443"}}}}], "splits": [{"name": "train", "num_bytes": 42892698.0, "num_examples": 800}, {"name": "test", "num_bytes": 1071895.0, "num_examples": 20}], "download_size": 38128383, "dataset_size": 43964593.0}} | 2023-11-12T10:05:00+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_15_s_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_15_s_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_15_s_200\"\n\nMore Information needed"
]
| [
6,
24
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_15_s_200\"\n\nMore Information needed"
]
|
188ef8d27e1fa2180fe5e0f4985627a8620dd55c | # Dataset Card for "bw_spec_cls_4_16_s_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_16_s_200 | [
"region:us"
]
| 2023-11-12T10:11:41+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "1482", "1": "1510", "2": "1544", "3": "1642"}}}}], "splits": [{"name": "train", "num_bytes": 43983230.0, "num_examples": 800}, {"name": "test", "num_bytes": 1108325.0, "num_examples": 20}], "download_size": 38471730, "dataset_size": 45091555.0}} | 2023-11-12T10:11:44+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_16_s_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_16_s_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_16_s_200\"\n\nMore Information needed"
]
| [
6,
24
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_16_s_200\"\n\nMore Information needed"
]
|
f59e7bcbf1df916f0f1d75dcefcd7f2aa4e02d4a | # Dataset Card for "bw_spec_cls_4_17_s_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_17_s_200 | [
"region:us"
]
| 2023-11-12T10:18:23+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "1644", "1": "1649", "2": "1661", "3": "1663"}}}}], "splits": [{"name": "train", "num_bytes": 43937841.0, "num_examples": 800}, {"name": "test", "num_bytes": 1084667.0, "num_examples": 20}], "download_size": 39034892, "dataset_size": 45022508.0}} | 2023-11-12T10:18:27+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_17_s_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_17_s_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_17_s_200\"\n\nMore Information needed"
]
| [
6,
24
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_17_s_200\"\n\nMore Information needed"
]
|
347a71c0f568c802c528cebd017561c39e0e3e59 | # Dataset Card for "bw_spec_cls_4_18_s_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_18_s_200 | [
"region:us"
]
| 2023-11-12T10:25:15+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "1666", "1": "1673", "2": "1680", "3": "1681"}}}}], "splits": [{"name": "train", "num_bytes": 46542294.0, "num_examples": 800}, {"name": "test", "num_bytes": 1182286.0, "num_examples": 20}], "download_size": 41914749, "dataset_size": 47724580.0}} | 2023-11-12T10:25:19+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_18_s_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_18_s_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_18_s_200\"\n\nMore Information needed"
]
| [
6,
24
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_18_s_200\"\n\nMore Information needed"
]
|
8f7224f7eff4c3bb62d9cc12d7b5058b302327e8 |
This dataset contains 150K (train + test) cleaned tweets in Azerbaijani. The tweets were collected in 2021, then filtered and cleaned with the following steps:
- Initial data were collected using the twint library. The tool is now deprecated and no longer works with current Twitter.
- On top of the already filtered data, I applied an additional filter to select Azerbaijani tweets using the fastText language-identification model.
- Tweets were classified into 3 emotion categories: {positive: 1, negative: -1, neutral: 0}, using emojis as a rule-based classifier.
- Tags, usernames, and emojis were then removed.
- Short tweets were filtered out. | hajili/azerbaijani_tweet_emotion_classification | [
"task_categories:text-classification",
"size_categories:100K<n<1M",
"language:az",
"license:mit",
"region:us"
]
| 2023-11-12T10:30:24+00:00 | {"language": ["az"], "license": "mit", "size_categories": ["100K<n<1M"], "task_categories": ["text-classification"]} | 2023-11-12T10:39:51+00:00 | []
| [
"az"
]
| TAGS
#task_categories-text-classification #size_categories-100K<n<1M #language-Azerbaijani #license-mit #region-us
|
This dataset contains 150K (train + test) cleaned tweets in Azerbaijani. The tweets were collected in 2021, then filtered and cleaned with the following steps:
- Initial data were collected using the twint library. The tool is now deprecated and no longer works with current Twitter.
- On top of the already filtered data, I applied an additional filter to select Azerbaijani tweets using the fastText language-identification model.
- Tweets were classified into 3 emotion categories: {positive: 1, negative: -1, neutral: 0}, using emojis as a rule-based classifier.
- Tags, usernames, and emojis were then removed.
- Short tweets were filtered out. | []
| [
"TAGS\n#task_categories-text-classification #size_categories-100K<n<1M #language-Azerbaijani #license-mit #region-us \n"
]
| [
41
]
| [
"passage: TAGS\n#task_categories-text-classification #size_categories-100K<n<1M #language-Azerbaijani #license-mit #region-us \n"
]
|
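The card above describes its labeling pipeline only in prose. A minimal sketch of what the fastText language filter and emoji rule-based classifier it mentions could look like follows; the emoji sets, model file name, and threshold are illustrative assumptions, not the author's published code:

```python
import re
import fasttext  # pip install fasttext; lid.176.bin is the official language-ID model

# Hypothetical emoji lexicons; the card does not publish the actual sets.
POSITIVE = {"😀", "😂", "😍", "👍"}
NEGATIVE = {"😢", "😡", "😠", "👎"}

lid = fasttext.load_model("lid.176.bin")

def is_azerbaijani(text: str, threshold: float = 0.5) -> bool:
    # predict() rejects newlines, so flatten the tweet first.
    labels, probs = lid.predict(text.replace("\n", " "))
    return labels[0] == "__label__az" and probs[0] >= threshold

def emoji_label(text: str) -> int:
    """Rule-based emotion label: positive=1, negative=-1, neutral=0."""
    pos = sum(ch in POSITIVE for ch in text)
    neg = sum(ch in NEGATIVE for ch in text)
    if pos > neg:
        return 1
    if neg > pos:
        return -1
    return 0

def clean(text: str) -> str:
    """Strip tags, usernames, and emojis after labeling, as the card describes."""
    text = re.sub(r"[@#]\w+", "", text)
    # Keep letters (including Azerbaijani ə, ğ, ş) and ASCII; emojis are neither.
    text = "".join(ch for ch in text if ch.isalpha() or ch.isascii())
    return re.sub(r"\s+", " ", text).strip()
```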
1c00945591ad51b946a464942f2efe27a3b14840 | # Dataset Card for "bw_spec_cls_4_19_s_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_19_s_200 | [
"region:us"
]
| 2023-11-12T10:32:02+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "1682", "1": "1683", "2": "1684", "3": "1685"}}}}], "splits": [{"name": "train", "num_bytes": 50010946.0, "num_examples": 800}, {"name": "test", "num_bytes": 1255884.0, "num_examples": 20}], "download_size": 43829751, "dataset_size": 51266830.0}} | 2023-11-12T10:32:07+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_19_s_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_19_s_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_19_s_200\"\n\nMore Information needed"
]
| [
6,
24
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_19_s_200\"\n\nMore Information needed"
]
|
ca807bd810fe40842902596b0df2c57f36299e59 | # Dataset Card for "robotphoto"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | allen0523/robotphoto | [
"region:us"
]
| 2023-11-12T10:37:39+00:00 | {"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 241008411.0, "num_examples": 300}], "download_size": 240936232, "dataset_size": 241008411.0}} | 2023-11-12T10:40:35+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "robotphoto"
More Information needed | [
"# Dataset Card for \"robotphoto\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"robotphoto\"\n\nMore Information needed"
]
| [
6,
12
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"robotphoto\"\n\nMore Information needed"
]
|
ff3b2ff3cac8cc972133107d92119a38c2cac315 | # Dataset Card for "bw_spec_cls_4_20_s_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_20_s_200 | [
"region:us"
]
| 2023-11-12T10:39:03+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "1686", "1": "1687", "2": "1688", "3": "1689"}}}}], "splits": [{"name": "train", "num_bytes": 49278489.0, "num_examples": 800}, {"name": "test", "num_bytes": 1230085.0, "num_examples": 20}], "download_size": 42684649, "dataset_size": 50508574.0}} | 2023-11-12T10:39:07+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_20_s_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_20_s_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_20_s_200\"\n\nMore Information needed"
]
| [
6,
24
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_20_s_200\"\n\nMore Information needed"
]
|
ef5a5b6eff9f6d6f0317459b78b209efd10b9963 | # Dataset Card for "bw_spec_cls_4_21_s_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_21_s_200 | [
"region:us"
]
| 2023-11-12T10:46:02+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "1701", "1": "1702", "2": "1703", "3": "1704"}}}}], "splits": [{"name": "train", "num_bytes": 47438520.0, "num_examples": 800}, {"name": "test", "num_bytes": 1189696.0, "num_examples": 20}], "download_size": 41706565, "dataset_size": 48628216.0}} | 2023-11-12T10:46:05+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_21_s_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_21_s_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_21_s_200\"\n\nMore Information needed"
]
| [
6,
24
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_21_s_200\"\n\nMore Information needed"
]
|
0676fa2611d0ec8cd571011cbc2d52145f9dc455 | # Dataset Card for "bw_spec_cls_4_22_s_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_22_s_200 | [
"region:us"
]
| 2023-11-12T10:52:58+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "1706", "1": "1720", "2": "1732", "3": "1733"}}}}], "splits": [{"name": "train", "num_bytes": 43566639.0, "num_examples": 800}, {"name": "test", "num_bytes": 1095432.0, "num_examples": 20}], "download_size": 38693515, "dataset_size": 44662071.0}} | 2023-11-12T10:53:02+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_22_s_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_22_s_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_22_s_200\"\n\nMore Information needed"
]
| [
6,
24
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_22_s_200\"\n\nMore Information needed"
]
|
0feee0054f136da4f34a0a7ca92b0c59dbe4ef1c | # Dataset Card for "pattern-net"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | danielz01/pattern-net | [
"region:us"
]
| 2023-11-12T10:59:31+00:00 | {"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": "string"}, {"name": "path", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 822501873.6, "num_examples": 30400}], "download_size": 1422604377, "dataset_size": 822501873.6}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2023-11-12T11:12:30+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "pattern-net"
More Information needed | [
"# Dataset Card for \"pattern-net\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"pattern-net\"\n\nMore Information needed"
]
| [
6,
14
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"pattern-net\"\n\nMore Information needed"
]
|
f3434addbfe01b4bef82d755c890106a67c30eba | # Dataset Card for "bw_spec_cls_4_23_s_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_23_s_200 | [
"region:us"
]
| 2023-11-12T10:59:55+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "1735", "1": "1736", "2": "1883", "3": "1891"}}}}], "splits": [{"name": "train", "num_bytes": 43184098.0, "num_examples": 800}, {"name": "test", "num_bytes": 1095480.0, "num_examples": 20}], "download_size": 37432845, "dataset_size": 44279578.0}} | 2023-11-12T11:00:04+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_23_s_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_23_s_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_23_s_200\"\n\nMore Information needed"
]
| [
6,
24
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_23_s_200\"\n\nMore Information needed"
]
|
c42322dfa1dcf259a8e7e921a92ecfae5d5302e9 | # Dataset Card for "bw_spec_cls_4_24_s_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_24_s_200 | [
"region:us"
]
| 2023-11-12T11:07:00+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "1924", "1": "1925", "2": "1929", "3": "1930"}}}}], "splits": [{"name": "train", "num_bytes": 44364265.0, "num_examples": 800}, {"name": "test", "num_bytes": 1116045.0, "num_examples": 20}], "download_size": 38996336, "dataset_size": 45480310.0}} | 2023-11-12T11:07:04+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_24_s_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_24_s_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_24_s_200\"\n\nMore Information needed"
]
| [
6,
24
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_24_s_200\"\n\nMore Information needed"
]
|
804feb4857d93999d5a24b1ea6b0d14023c20301 | # Dataset Card for "java-encoded-small"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | JoaoJunior/java-encoded-small | [
"region:us"
]
| 2023-11-12T11:20:22+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "rem", "dtype": "string"}, {"name": "add", "dtype": "string"}, {"name": "context", "dtype": "string"}, {"name": "meta", "dtype": "string"}, {"name": "input_ids", "sequence": "int32"}, {"name": "attention_mask", "sequence": "int8"}, {"name": "labels", "sequence": "int64"}], "splits": [{"name": "train", "num_bytes": 2551158, "num_examples": 800}, {"name": "test", "num_bytes": 641178, "num_examples": 200}], "download_size": 391779, "dataset_size": 3192336}} | 2023-11-12T11:20:30+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "java-encoded-small"
More Information needed | [
"# Dataset Card for \"java-encoded-small\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"java-encoded-small\"\n\nMore Information needed"
]
| [
6,
17
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"java-encoded-small\"\n\nMore Information needed"
]
|
7d0477d886501c43b9b90d8a1b0a53d5dbf8224a | # Dataset Card for "commonvoice_13_0_pt_48kHz_simplificado"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | aomocelin/commonvoice_13_0_pt_48kHz_simplificado | [
"region:us"
]
| 2023-11-12T11:27:42+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "audio", "dtype": {"audio": {"sampling_rate": 48000}}}, {"name": "sentence", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 820757518.12, "num_examples": 29020}, {"name": "test", "num_bytes": 278039186.928, "num_examples": 9072}], "download_size": 1103544550, "dataset_size": 1098796705.0479999}} | 2023-11-12T11:28:41+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "commonvoice_13_0_pt_48kHz_simplificado"
More Information needed | [
"# Dataset Card for \"commonvoice_13_0_pt_48kHz_simplificado\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"commonvoice_13_0_pt_48kHz_simplificado\"\n\nMore Information needed"
]
| [
6,
28
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"commonvoice_13_0_pt_48kHz_simplificado\"\n\nMore Information needed"
]
|
82b168d567264d3586ad845eaf2c26ee0ed7e70c | # Dataset Card for "bw_spec_cls_4_02_noise_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_02_noise_200 | [
"region:us"
]
| 2023-11-12T11:30:26+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "197", "1": "200", "2": "203", "3": "204"}}}}], "splits": [{"name": "train", "num_bytes": 48933498.0, "num_examples": 800}, {"name": "test", "num_bytes": 1229175.0, "num_examples": 20}], "download_size": 26301580, "dataset_size": 50162673.0}} | 2023-11-12T11:30:29+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_02_noise_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_02_noise_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_02_noise_200\"\n\nMore Information needed"
]
| [
6,
25
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_02_noise_200\"\n\nMore Information needed"
]
|
6f6817ebe3bc2b9e7344ecc0c23c25e90bcad537 | # Dataset Card for "bw_spec_cls_4_03_noise_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_03_noise_200 | [
"region:us"
]
| 2023-11-12T11:36:12+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "207", "1": "210", "2": "211", "3": "212"}}}}], "splits": [{"name": "train", "num_bytes": 47704924.0, "num_examples": 800}, {"name": "test", "num_bytes": 1192825.0, "num_examples": 20}], "download_size": 23920920, "dataset_size": 48897749.0}} | 2023-11-12T11:36:15+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_03_noise_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_03_noise_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_03_noise_200\"\n\nMore Information needed"
]
| [
6,
25
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_03_noise_200\"\n\nMore Information needed"
]
|
29f7714d2112546207b5fc4b581d675ecd20e099 | # Dataset Card for "bw_spec_cls_4_04_noise_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_04_noise_200 | [
"region:us"
]
| 2023-11-12T11:41:54+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "213", "1": "255", "2": "256", "3": "368"}}}}], "splits": [{"name": "train", "num_bytes": 43614313.0, "num_examples": 800}, {"name": "test", "num_bytes": 1084810.0, "num_examples": 20}], "download_size": 21835920, "dataset_size": 44699123.0}} | 2023-11-12T11:41:57+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_04_noise_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_04_noise_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_04_noise_200\"\n\nMore Information needed"
]
| [
6,
25
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_04_noise_200\"\n\nMore Information needed"
]
|
f9abc2399f4671f8452346904d1b5b1aeec89a4d | # Dataset Card for "machine_learning_questions"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | mjphayes/machine_learning_questions | [
"region:us"
]
| 2023-11-12T11:42:42+00:00 | {"dataset_info": {"features": [{"name": "question", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "__index_level_0__", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 120983.07547169812, "num_examples": 508}, {"name": "test", "num_bytes": 30483.924528301886, "num_examples": 128}], "download_size": 85722, "dataset_size": 151467.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}]} | 2023-11-12T11:52:33+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "machine_learning_questions"
More Information needed | [
"# Dataset Card for \"machine_learning_questions\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"machine_learning_questions\"\n\nMore Information needed"
]
| [
6,
16
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"machine_learning_questions\"\n\nMore Information needed"
]
|
82af40f1d701d03269f777aee09ceed80bb96b1c | # Dataset Card for "byt-malicious-url-treatment"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | bgspaditya/byt-malicious-url-treatment | [
"region:us"
]
| 2023-11-12T11:45:48+00:00 | {"dataset_info": {"features": [{"name": "url", "dtype": "string"}, {"name": "type", "dtype": "string"}, {"name": "type_code", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 42342364.57124805, "num_examples": 512794}, {"name": "val", "num_bytes": 5292774.928436036, "num_examples": 64099}, {"name": "test", "num_bytes": 5292857.500315916, "num_examples": 64100}], "download_size": 31993322, "dataset_size": 52927997.0}} | 2023-11-12T12:04:00+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "byt-malicious-url-treatment"
More Information needed | [
"# Dataset Card for \"byt-malicious-url-treatment\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"byt-malicious-url-treatment\"\n\nMore Information needed"
]
| [
6,
20
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"byt-malicious-url-treatment\"\n\nMore Information needed"
]
|
aa92b4b709f7ef6d8a70b2b7963813bd64ac078d | # Dataset Card for "bw_spec_cls_4_05_noise_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_05_noise_200 | [
"region:us"
]
| 2023-11-12T11:47:35+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "424", "1": "534", "2": "540", "3": "546"}}}}], "splits": [{"name": "train", "num_bytes": 44163051.0, "num_examples": 800}, {"name": "test", "num_bytes": 1092283.0, "num_examples": 20}], "download_size": 24258752, "dataset_size": 45255334.0}} | 2023-11-12T11:47:39+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_05_noise_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_05_noise_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_05_noise_200\"\n\nMore Information needed"
]
| [
6,
25
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_05_noise_200\"\n\nMore Information needed"
]
|
ff59fbaca58b8c9e74383f8dcf5e700a5499b299 | # Dataset Card for "bw_spec_cls_4_06_noise_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_06_noise_200 | [
"region:us"
]
| 2023-11-12T11:53:18+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "574", "1": "615", "2": "620", "3": "621"}}}}], "splits": [{"name": "train", "num_bytes": 43039158.0, "num_examples": 800}, {"name": "test", "num_bytes": 1078413.0, "num_examples": 20}], "download_size": 22347406, "dataset_size": 44117571.0}} | 2023-11-12T11:53:20+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_06_noise_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_06_noise_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_06_noise_200\"\n\nMore Information needed"
]
| [
6,
25
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_06_noise_200\"\n\nMore Information needed"
]
|
bdea72188cb8db5e3996273821b77cfd5ac8e306 | # Dataset Card for "bw_spec_cls_4_07_noise_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_07_noise_200 | [
"region:us"
]
| 2023-11-12T11:59:01+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "625", "1": "666", "2": "667", "3": "676"}}}}], "splits": [{"name": "train", "num_bytes": 44712963.0, "num_examples": 800}, {"name": "test", "num_bytes": 1114021.0, "num_examples": 20}], "download_size": 25251789, "dataset_size": 45826984.0}} | 2023-11-12T11:59:04+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_07_noise_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_07_noise_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_07_noise_200\"\n\nMore Information needed"
]
| [
6,
25
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_07_noise_200\"\n\nMore Information needed"
]
|
29e15d8f5f0923d3c1822565932c521e657ee162 | # Dataset Card for "bw_spec_cls_4_08_noise_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_08_noise_200 | [
"region:us"
]
| 2023-11-12T12:04:43+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "694", "1": "695", "2": "714", "3": "715"}}}}], "splits": [{"name": "train", "num_bytes": 44590621.0, "num_examples": 800}, {"name": "test", "num_bytes": 1109208.0, "num_examples": 20}], "download_size": 22473093, "dataset_size": 45699829.0}} | 2023-11-12T12:04:46+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_08_noise_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_08_noise_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_08_noise_200\"\n\nMore Information needed"
]
| [
6,
25
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_08_noise_200\"\n\nMore Information needed"
]
|
b17db89a4e37fcfbdf79cfa62d32ad68c3bcc379 | # Dataset Card for "bw_spec_cls_4_09_noise_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_09_noise_200 | [
"region:us"
]
| 2023-11-12T12:10:23+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "716", "1": "718", "2": "777", "3": "814"}}}}], "splits": [{"name": "train", "num_bytes": 43659606.0, "num_examples": 800}, {"name": "test", "num_bytes": 1086202.0, "num_examples": 20}], "download_size": 23169138, "dataset_size": 44745808.0}} | 2023-11-12T12:10:27+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_09_noise_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_09_noise_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_09_noise_200\"\n\nMore Information needed"
]
| [
6,
25
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_09_noise_200\"\n\nMore Information needed"
]
|
df151ce4122b141dea81233842f986ca111918cf | # Dataset Card for "bw_spec_cls_4_10_noise_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_10_noise_200 | [
"region:us"
]
| 2023-11-12T12:16:01+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "821", "1": "822", "2": "825", "3": "853"}}}}], "splits": [{"name": "train", "num_bytes": 44880622.0, "num_examples": 800}, {"name": "test", "num_bytes": 1121364.0, "num_examples": 20}], "download_size": 23513405, "dataset_size": 46001986.0}} | 2023-11-12T12:16:04+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_10_noise_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_10_noise_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_10_noise_200\"\n\nMore Information needed"
]
| [
6,
25
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_10_noise_200\"\n\nMore Information needed"
]
|
3d245b3e81555965555ea184bb9cd00f90d48c47 | # Dataset Card for "bw_spec_cls_4_11_noise_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_11_noise_200 | [
"region:us"
]
| 2023-11-12T12:21:34+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "897", "1": "995", "2": "997", "3": "998"}}}}], "splits": [{"name": "train", "num_bytes": 35205161.0, "num_examples": 800}, {"name": "test", "num_bytes": 880587.0, "num_examples": 20}], "download_size": 17615671, "dataset_size": 36085748.0}} | 2023-11-12T12:21:39+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_11_noise_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_11_noise_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_11_noise_200\"\n\nMore Information needed"
]
| [
6,
25
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_11_noise_200\"\n\nMore Information needed"
]
|
01c9f3cbbc386b46001d44f888777a9e94220bd9 | # Dataset Card for "ambito_juridico_artigos"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | celsowm/ambito_juridico_artigos | [
"region:us"
]
| 2023-11-12T12:24:47+00:00 | {"dataset_info": {"features": [{"name": "titulo", "dtype": "string"}, {"name": "resumo", "dtype": "string"}, {"name": "categoria", "dtype": "string"}, {"name": "texto", "dtype": "string"}, {"name": "link", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 390296460, "num_examples": 11373}], "download_size": 198397380, "dataset_size": 390296460}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2023-12-03T15:57:14+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "ambito_juridico_artigos"
More Information needed | [
"# Dataset Card for \"ambito_juridico_artigos\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"ambito_juridico_artigos\"\n\nMore Information needed"
]
| [
6,
18
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"ambito_juridico_artigos\"\n\nMore Information needed"
]
|
3a751d64a2ae200bb34b2c71c0794ea86edf1c15 | # Dataset Card for "bw_spec_cls_4_12_noise_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_12_noise_200 | [
"region:us"
]
| 2023-11-12T12:27:17+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "1039", "1": "1040", "2": "1082", "3": "1083"}}}}], "splits": [{"name": "train", "num_bytes": 43275557.0, "num_examples": 800}, {"name": "test", "num_bytes": 1080285.0, "num_examples": 20}], "download_size": 23012897, "dataset_size": 44355842.0}} | 2023-11-12T12:27:21+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_12_noise_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_12_noise_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_12_noise_200\"\n\nMore Information needed"
]
| [
6,
25
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_12_noise_200\"\n\nMore Information needed"
]
|
269399b58f6898e4e9a79807e17b54ebde377da7 | # Dataset Card for "bw_spec_cls_4_13_noise_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_13_noise_200 | [
"region:us"
]
| 2023-11-12T12:32:57+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "1102", "1": "1193", "2": "1195", "3": "1196"}}}}], "splits": [{"name": "train", "num_bytes": 43664180.0, "num_examples": 800}, {"name": "test", "num_bytes": 1091779.0, "num_examples": 20}], "download_size": 23561812, "dataset_size": 44755959.0}} | 2023-11-12T12:33:00+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_13_noise_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_13_noise_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_13_noise_200\"\n\nMore Information needed"
]
| [
6,
25
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_13_noise_200\"\n\nMore Information needed"
]
|
5cf3ae2d17781793213273bdbdee9e503d551c1c | # Dataset Card for "bw_spec_cls_4_14_noise_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_14_noise_200 | [
"region:us"
]
| 2023-11-12T12:38:41+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "1197", "1": "1270", "2": "1276", "3": "1277"}}}}], "splits": [{"name": "train", "num_bytes": 44325249.0, "num_examples": 800}, {"name": "test", "num_bytes": 1108340.0, "num_examples": 20}], "download_size": 23993588, "dataset_size": 45433589.0}} | 2023-11-12T12:38:44+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_14_noise_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_14_noise_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_14_noise_200\"\n\nMore Information needed"
]
| [
6,
25
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_14_noise_200\"\n\nMore Information needed"
]
|
3a1f14c47e8e2cac5f23be00bb32c22fdb5c57c9 | # Dataset Card for "bw_spec_cls_4_15_noise_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_15_noise_200 | [
"region:us"
]
| 2023-11-12T12:44:24+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "1278", "1": "1417", "2": "1427", "3": "1443"}}}}], "splits": [{"name": "train", "num_bytes": 43317245.0, "num_examples": 800}, {"name": "test", "num_bytes": 1084971.0, "num_examples": 20}], "download_size": 22038759, "dataset_size": 44402216.0}} | 2023-11-12T12:44:28+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_15_noise_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_15_noise_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_15_noise_200\"\n\nMore Information needed"
]
| [
6,
25
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_15_noise_200\"\n\nMore Information needed"
]
|
f1177c6262a05940497b67c376cea166ff7868e6 | # Dataset Card for "processed_chart_to_table_mix"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | nourheshamshaheen/processed_chart_to_table_mix | [
"region:us"
]
| 2023-11-12T12:45:13+00:00 | {"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "text", "dtype": "string"}, {"name": "type", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 101062301.385, "num_examples": 2245}, {"name": "test", "num_bytes": 25059693.0, "num_examples": 562}], "download_size": 108892113, "dataset_size": 126121994.385}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}]} | 2023-11-12T12:58:47+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "processed_chart_to_table_mix"
More Information needed | [
"# Dataset Card for \"processed_chart_to_table_mix\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"processed_chart_to_table_mix\"\n\nMore Information needed"
]
| [
6,
21
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"processed_chart_to_table_mix\"\n\nMore Information needed"
]
|
dd890c2d95f4720be6c512c3ceccfa145ead7662 | # Dataset Card for "attackgpt_base"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | mmcho1157/attackgpt_base | [
"region:us"
]
| 2023-11-12T12:47:19+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 16440, "num_examples": 70}], "download_size": 2433, "dataset_size": 16440}} | 2023-11-12T12:47:20+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "attackgpt_base"
More Information needed | [
"# Dataset Card for \"attackgpt_base\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"attackgpt_base\"\n\nMore Information needed"
]
| [
6,
16
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"attackgpt_base\"\n\nMore Information needed"
]
|
9297fd6f8625c402be476474043dddb7437b9e08 | # Dataset Card for "bw_spec_cls_4_16_noise_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_16_noise_200 | [
"region:us"
]
| 2023-11-12T12:50:08+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "1482", "1": "1510", "2": "1544", "3": "1642"}}}}], "splits": [{"name": "train", "num_bytes": 44641780.0, "num_examples": 800}, {"name": "test", "num_bytes": 1123762.0, "num_examples": 20}], "download_size": 24073355, "dataset_size": 45765542.0}} | 2023-11-12T12:50:12+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_16_noise_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_16_noise_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_16_noise_200\"\n\nMore Information needed"
]
| [
6,
25
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_16_noise_200\"\n\nMore Information needed"
]
|
448f8587bd033f563822e1a110f0f44d42a0adb6 | # Dataset Card for "AIPD_nlp_granted_claims"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | patent/AIPD_nlp_granted_claims | [
"region:us"
]
| 2023-11-12T12:50:26+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}, {"split": "valid", "path": "data/valid-*"}]}], "dataset_info": {"features": [{"name": "patent_num", "dtype": "int64"}, {"name": "claim_num1", "dtype": "int64"}, {"name": "claim_num2", "dtype": "int64"}, {"name": "instruction", "dtype": "string"}, {"name": "input", "dtype": "string"}, {"name": "output", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 1311491690.1458514, "num_examples": 906087}, {"name": "test", "num_bytes": 72861855.63886473, "num_examples": 50339}, {"name": "valid", "num_bytes": 72860408.21528383, "num_examples": 50338}], "download_size": 562077206, "dataset_size": 1457213954.0}} | 2023-11-12T13:39:40+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "AIPD_nlp_granted_claims"
More Information needed | [
"# Dataset Card for \"AIPD_nlp_granted_claims\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"AIPD_nlp_granted_claims\"\n\nMore Information needed"
]
| [
6,
21
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"AIPD_nlp_granted_claims\"\n\nMore Information needed"
]
|
e5ad58e5407d5c113b2afd3f3cc56ed23c1a1ac8 | # Dataset Card for "clean_title"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | Eitanli/clean_title | [
"region:us"
]
| 2023-11-12T12:51:31+00:00 | {"dataset_info": {"features": [{"name": "id", "dtype": "int64"}, {"name": "recipe", "dtype": "string"}, {"name": "title_cleaned", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 108673191, "num_examples": 74465}], "download_size": 55560085, "dataset_size": 108673191}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2023-11-13T11:35:03+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "clean_title"
More Information needed | [
"# Dataset Card for \"clean_title\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"clean_title\"\n\nMore Information needed"
]
| [
6,
14
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"clean_title\"\n\nMore Information needed"
]
|
764b7654d2df3b2871172b616155b72611f09f47 | # Dataset Card for "bw_spec_cls_4_17_noise_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_17_noise_200 | [
"region:us"
]
| 2023-11-12T12:55:50+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "1644", "1": "1649", "2": "1661", "3": "1663"}}}}], "splits": [{"name": "train", "num_bytes": 44066224.0, "num_examples": 800}, {"name": "test", "num_bytes": 1101943.0, "num_examples": 20}], "download_size": 22426644, "dataset_size": 45168167.0}} | 2023-11-12T12:55:53+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_17_noise_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_17_noise_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_17_noise_200\"\n\nMore Information needed"
]
| [
6,
25
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_17_noise_200\"\n\nMore Information needed"
]
|
7438df8a588e4e85227bfc5e3f9c7e44eab07c92 | # Dataset Card for "nike_laion"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | toilaluan/nike_laion | [
"region:us"
]
| 2023-11-12T12:59:17+00:00 | {"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "url", "dtype": "string"}, {"name": "caption", "dtype": "string"}, {"name": "id", "dtype": "int64"}, {"name": "similarity", "dtype": "float64"}], "splits": [{"name": "train", "num_bytes": 343403671.807, "num_examples": 5117}], "download_size": 282913216, "dataset_size": 343403671.807}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2023-11-12T13:30:35+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "nike_laion"
More Information needed | [
"# Dataset Card for \"nike_laion\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"nike_laion\"\n\nMore Information needed"
]
| [
6,
14
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"nike_laion\"\n\nMore Information needed"
]
|
401f40f15ebd96cec30aa0a38f1269c454d9c1bd | # Dataset Card for "bw_spec_cls_4_18_noise_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_18_noise_200 | [
"region:us"
]
| 2023-11-12T13:01:32+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "1666", "1": "1673", "2": "1680", "3": "1681"}}}}], "splits": [{"name": "train", "num_bytes": 47436633.0, "num_examples": 800}, {"name": "test", "num_bytes": 1187175.0, "num_examples": 20}], "download_size": 26409307, "dataset_size": 48623808.0}} | 2023-11-12T13:01:35+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_18_noise_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_18_noise_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_18_noise_200\"\n\nMore Information needed"
]
| [
6,
25
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_18_noise_200\"\n\nMore Information needed"
]
|
c4ef04b01bee2c37e1521ccdb3096a8dda7b2230 | # Dataset Card for "bw_spec_cls_4_19_noise_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_19_noise_200 | [
"region:us"
]
| 2023-11-12T13:07:13+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "1682", "1": "1683", "2": "1684", "3": "1685"}}}}], "splits": [{"name": "train", "num_bytes": 51758534.0, "num_examples": 800}, {"name": "test", "num_bytes": 1291509.0, "num_examples": 20}], "download_size": 29420147, "dataset_size": 53050043.0}} | 2023-11-12T13:07:22+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_19_noise_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_19_noise_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_19_noise_200\"\n\nMore Information needed"
]
| [
6,
25
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_19_noise_200\"\n\nMore Information needed"
]
|
0e03c595774e463784e9acee55f3c1cbbc4a51c0 | # Dataset Card for "bw_spec_cls_4_20_noise_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_20_noise_200 | [
"region:us"
]
| 2023-11-12T13:12:58+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "1686", "1": "1687", "2": "1688", "3": "1689"}}}}], "splits": [{"name": "train", "num_bytes": 50946335.0, "num_examples": 800}, {"name": "test", "num_bytes": 1272335.0, "num_examples": 20}], "download_size": 26226274, "dataset_size": 52218670.0}} | 2023-11-12T13:13:02+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_20_noise_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_20_noise_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_20_noise_200\"\n\nMore Information needed"
]
| [
6,
25
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_20_noise_200\"\n\nMore Information needed"
]
|
a2c3242f9f537f3e124bda420bc430e519a3fce5 | # Dataset Card for "bw_spec_cls_4_21_noise_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_21_noise_200 | [
"region:us"
]
| 2023-11-12T13:18:42+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "1701", "1": "1702", "2": "1703", "3": "1704"}}}}], "splits": [{"name": "train", "num_bytes": 48774605.0, "num_examples": 800}, {"name": "test", "num_bytes": 1227583.0, "num_examples": 20}], "download_size": 24700253, "dataset_size": 50002188.0}} | 2023-11-12T13:18:45+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_21_noise_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_21_noise_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_21_noise_200\"\n\nMore Information needed"
]
| [
6,
25
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_21_noise_200\"\n\nMore Information needed"
]
|
f48842e343d8c667c745e12b864dc14adb806f01 | # Dataset Card for "new_typed_chart_to_table_mix"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | nourheshamshaheen/new_typed_chart_to_table_mix | [
"region:us"
]
| 2023-11-12T13:21:06+00:00 | {"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "text", "dtype": "string"}, {"name": "type", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 25058693.0, "num_examples": 562}], "download_size": 21501493, "dataset_size": 25058693.0}, "configs": [{"config_name": "default", "data_files": [{"split": "test", "path": "data/test-*"}]}]} | 2023-11-12T13:21:09+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "new_typed_chart_to_table_mix"
More Information needed | [
"# Dataset Card for \"new_typed_chart_to_table_mix\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"new_typed_chart_to_table_mix\"\n\nMore Information needed"
]
| [
6,
23
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"new_typed_chart_to_table_mix\"\n\nMore Information needed"
]
|
1a8b428586471f3ffce42363c42415dfc88a37ae | # Dataset Card for "bw_spec_cls_4_22_noise_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_22_noise_200 | [
"region:us"
]
| 2023-11-12T13:24:21+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "1706", "1": "1720", "2": "1732", "3": "1733"}}}}], "splits": [{"name": "train", "num_bytes": 43598869.0, "num_examples": 800}, {"name": "test", "num_bytes": 1089488.0, "num_examples": 20}], "download_size": 23887029, "dataset_size": 44688357.0}} | 2023-11-12T13:24:28+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_22_noise_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_22_noise_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_22_noise_200\"\n\nMore Information needed"
]
| [
6,
25
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_22_noise_200\"\n\nMore Information needed"
]
|
c17475b5c4f1eca604fc11a98fbf83c11a0b6d57 | # Dataset Card for "enem-2023-dia-1"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | dominguesm/enem-2023-dia-1 | [
"region:us"
]
| 2023-11-12T13:29:50+00:00 | {"dataset_info": {"features": [{"name": "question", "dtype": "string"}, {"name": "resolution", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 151235, "num_examples": 89}], "download_size": 105603, "dataset_size": 151235}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2023-11-12T13:29:54+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "enem-2023-dia-1"
More Information needed | [
"# Dataset Card for \"enem-2023-dia-1\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"enem-2023-dia-1\"\n\nMore Information needed"
]
| [
6,
17
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"enem-2023-dia-1\"\n\nMore Information needed"
]
|
cf1fefd16c0dca329fb895d9f7d9b091c1424c3c | # Dataset Card for "bw_spec_cls_4_23_noise_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_23_noise_200 | [
"region:us"
]
| 2023-11-12T13:30:01+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "1735", "1": "1736", "2": "1883", "3": "1891"}}}}], "splits": [{"name": "train", "num_bytes": 43212950.0, "num_examples": 800}, {"name": "test", "num_bytes": 1078883.0, "num_examples": 20}], "download_size": 24155710, "dataset_size": 44291833.0}} | 2023-11-12T13:30:07+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_23_noise_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_23_noise_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_23_noise_200\"\n\nMore Information needed"
]
| [
6,
25
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_23_noise_200\"\n\nMore Information needed"
]
|
0e77e30b51ad17d52a1dc3394258804bd45016cf | # Dataset Card for "acronym-identification-1k"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | wt-golf/acronym-identification-1k | [
"region:us"
]
| 2023-11-12T13:35:06+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "labels", "sequence": "int64"}, {"name": "tokens", "sequence": "string"}], "splits": [{"name": "train", "num_bytes": 555254, "num_examples": 1000}, {"name": "validation", "num_bytes": 536083, "num_examples": 1000}, {"name": "test", "num_bytes": 568935, "num_examples": 1000}], "download_size": 312635, "dataset_size": 1660272}} | 2023-11-12T13:35:10+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "acronym-identification-1k"
More Information needed | [
"# Dataset Card for \"acronym-identification-1k\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"acronym-identification-1k\"\n\nMore Information needed"
]
| [
6,
17
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"acronym-identification-1k\"\n\nMore Information needed"
]
|
f0f71a8e766ae28e583d3df64581abd30f46ec0d | # Dataset Card for "bw_spec_cls_4_24_noise_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_24_noise_200 | [
"region:us"
]
| 2023-11-12T13:35:41+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "1924", "1": "1925", "2": "1929", "3": "1930"}}}}], "splits": [{"name": "train", "num_bytes": 45313146.0, "num_examples": 800}, {"name": "test", "num_bytes": 1134012.0, "num_examples": 20}], "download_size": 23448998, "dataset_size": 46447158.0}} | 2023-11-12T13:35:44+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_24_noise_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_24_noise_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_24_noise_200\"\n\nMore Information needed"
]
| [
6,
25
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_24_noise_200\"\n\nMore Information needed"
]
|
0d7d4d3a512349cba0c8ad8f6de79662f0cfd10a | # Dataset Card for "CFD_Dataset"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | rishitunu/CFD_Dataset | [
"region:us"
]
| 2023-11-12T13:45:01+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": "image"}], "splits": [{"name": "train", "num_bytes": 24957387.82, "num_examples": 1345}, {"name": "test", "num_bytes": 38095714.106, "num_examples": 2018}], "download_size": 36703350, "dataset_size": 63053101.926}} | 2023-11-12T13:54:29+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "CFD_Dataset"
More Information needed | [
"# Dataset Card for \"CFD_Dataset\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"CFD_Dataset\"\n\nMore Information needed"
]
| [
6,
15
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"CFD_Dataset\"\n\nMore Information needed"
]
|
8278cb1394987fc454706c3061ea36fdccf14039 | # Dataset Card for "temp"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | nourheshamshaheen/temp | [
"region:us"
]
| 2023-11-12T14:34:41+00:00 | {"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "text", "dtype": "string"}, {"name": "type", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 25058975.0, "num_examples": 562}], "download_size": 21501906, "dataset_size": 25058975.0}, "configs": [{"config_name": "default", "data_files": [{"split": "test", "path": "data/test-*"}]}]} | 2023-11-12T14:34:44+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "temp"
More Information needed | [
"# Dataset Card for \"temp\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"temp\"\n\nMore Information needed"
]
| [
6,
11
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"temp\"\n\nMore Information needed"
]
|
76244339eff533a079e895ebcdcf54a274fd2a8f |
# Bangumi Image Base of Shoujo Kageki Revue Starlight
This is the image base of the bangumi Shoujo Kageki Revue Starlight; we detected 55 characters and 3633 images in total. The full dataset is [here](all.zip).
**Please note that these image bases are not guaranteed to be 100% clean; they may contain noisy samples.** If you intend to manually train models using this dataset, we recommend performing the necessary preprocessing on the downloaded dataset to eliminate potential noisy samples (approximately 1% probability).
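As a hedged illustration of that preprocessing step (this is not an official cleaning script; the zip layout and thresholds are assumptions), the sketch below screens one downloaded character pack, e.g. `0/dataset.zip`, for technically broken or suspiciously small images. The roughly 1% of mislabeled samples still require manual review.

```python
# Minimal screening sketch for one downloaded character pack (assumed layout:
# a flat zip of image files, e.g. 0/dataset.zip from this repository).
# Automated checks only catch technically broken files; the ~1% of
# mislabeled samples still need manual review.
import zipfile
from io import BytesIO
from PIL import Image

def screen_pack(zip_path: str, min_side: int = 64) -> list[str]:
    suspects = []
    with zipfile.ZipFile(zip_path) as zf:
        for name in zf.namelist():
            if not name.lower().endswith((".png", ".jpg", ".jpeg", ".webp")):
                continue
            data = zf.read(name)
            try:
                Image.open(BytesIO(data)).verify()  # raises on corrupt/truncated files
                width, height = Image.open(BytesIO(data)).size  # reopen after verify()
                if min(width, height) < min_side:
                    suspects.append(name)  # tiny crops are often detector noise
            except Exception:
                suspects.append(name)
    return suspects

print(screen_pack("0/dataset.zip"))
```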
Here is the characters' preview:
| # | Images | Download | Preview 1 | Preview 2 | Preview 3 | Preview 4 | Preview 5 | Preview 6 | Preview 7 | Preview 8 |
|:------|---------:|:---------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|
| 0 | 65 | [Download](0/dataset.zip) |  |  |  |  |  |  |  |  |
| 1 | 302 | [Download](1/dataset.zip) |  |  |  |  |  |  |  |  |
| 2 | 201 | [Download](2/dataset.zip) |  |  |  |  |  |  |  |  |
| 3 | 39 | [Download](3/dataset.zip) |  |  |  |  |  |  |  |  |
| 4 | 28 | [Download](4/dataset.zip) |  |  |  |  |  |  |  |  |
| 5 | 46 | [Download](5/dataset.zip) |  |  |  |  |  |  |  |  |
| 6 | 215 | [Download](6/dataset.zip) |  |  |  |  |  |  |  |  |
| 7 | 17 | [Download](7/dataset.zip) |  |  |  |  |  |  |  |  |
| 8 | 29 | [Download](8/dataset.zip) |  |  |  |  |  |  |  |  |
| 9 | 10 | [Download](9/dataset.zip) |  |  |  |  |  |  |  |  |
| 10 | 21 | [Download](10/dataset.zip) |  |  |  |  |  |  |  |  |
| 11 | 9 | [Download](11/dataset.zip) |  |  |  |  |  |  |  |  |
| 12 | 189 | [Download](12/dataset.zip) |  |  |  |  |  |  |  |  |
| 13 | 17 | [Download](13/dataset.zip) |  |  |  |  |  |  |  |  |
| 14 | 214 | [Download](14/dataset.zip) |  |  |  |  |  |  |  |  |
| 15 | 31 | [Download](15/dataset.zip) |  |  |  |  |  |  |  |  |
| 16 | 42 | [Download](16/dataset.zip) |  |  |  |  |  |  |  |  |
| 17 | 151 | [Download](17/dataset.zip) |  |  |  |  |  |  |  |  |
| 18 | 32 | [Download](18/dataset.zip) |  |  |  |  |  |  |  |  |
| 19 | 43 | [Download](19/dataset.zip) |  |  |  |  |  |  |  |  |
| 20 | 235 | [Download](20/dataset.zip) |  |  |  |  |  |  |  |  |
| 21 | 27 | [Download](21/dataset.zip) |  |  |  |  |  |  |  |  |
| 22 | 134 | [Download](22/dataset.zip) |  |  |  |  |  |  |  |  |
| 23 | 12 | [Download](23/dataset.zip) |  |  |  |  |  |  |  |  |
| 24 | 23 | [Download](24/dataset.zip) |  |  |  |  |  |  |  |  |
| 25 | 11 | [Download](25/dataset.zip) |  |  |  |  |  |  |  |  |
| 26 | 18 | [Download](26/dataset.zip) |  |  |  |  |  |  |  |  |
| 27 | 14 | [Download](27/dataset.zip) |  |  |  |  |  |  |  |  |
| 28 | 38 | [Download](28/dataset.zip) |  |  |  |  |  |  |  |  |
| 29 | 18 | [Download](29/dataset.zip) |  |  |  |  |  |  |  |  |
| 30 | 215 | [Download](30/dataset.zip) |  |  |  |  |  |  |  |  |
| 31 | 134 | [Download](31/dataset.zip) |  |  |  |  |  |  |  |  |
| 32 | 29 | [Download](32/dataset.zip) |  |  |  |  |  |  |  |  |
| 33 | 20 | [Download](33/dataset.zip) |  |  |  |  |  |  |  |  |
| 34 | 20 | [Download](34/dataset.zip) |  |  |  |  |  |  |  |  |
| 35 | 6 | [Download](35/dataset.zip) |  |  |  |  |  |  | N/A | N/A |
| 36 | 38 | [Download](36/dataset.zip) |  |  |  |  |  |  |  |  |
| 37 | 9 | [Download](37/dataset.zip) |  |  |  |  |  |  |  |  |
| 38 | 14 | [Download](38/dataset.zip) |  |  |  |  |  |  |  |  |
| 39 | 10 | [Download](39/dataset.zip) |  |  |  |  |  |  |  |  |
| 40 | 24 | [Download](40/dataset.zip) |  |  |  |  |  |  |  |  |
| 41 | 45 | [Download](41/dataset.zip) |  |  |  |  |  |  |  |  |
| 42 | 10 | [Download](42/dataset.zip) |  |  |  |  |  |  |  |  |
| 43 | 7 | [Download](43/dataset.zip) |  |  |  |  |  |  |  | N/A |
| 44 | 28 | [Download](44/dataset.zip) |  |  |  |  |  |  |  |  |
| 45 | 42 | [Download](45/dataset.zip) |  |  |  |  |  |  |  |  |
| 46 | 183 | [Download](46/dataset.zip) |  |  |  |  |  |  |  |  |
| 47 | 28 | [Download](47/dataset.zip) |  |  |  |  |  |  |  |  |
| 48 | 10 | [Download](48/dataset.zip) |  |  |  |  |  |  |  |  |
| 49 | 50 | [Download](49/dataset.zip) |  |  |  |  |  |  |  |  |
| 50 | 38 | [Download](50/dataset.zip) |  |  |  |  |  |  |  |  |
| 51 | 30 | [Download](51/dataset.zip) |  |  |  |  |  |  |  |  |
| 52 | 60 | [Download](52/dataset.zip) |  |  |  |  |  |  |  |  |
| 53 | 37 | [Download](53/dataset.zip) |  |  |  |  |  |  |  |  |
| noise | 315 | [Download](-1/dataset.zip) |  |  |  |  |  |  |  |  |
| BangumiBase/shoujokagekirevuestarlight | [
"size_categories:1K<n<10K",
"license:mit",
"art",
"region:us"
]
| 2023-11-12T14:38:55+00:00 | {"license": "mit", "size_categories": ["1K<n<10K"], "tags": ["art"]} | 2023-11-12T16:31:58+00:00 | []
| []
| TAGS
#size_categories-1K<n<10K #license-mit #art #region-us
| Bangumi Image Base of Shoujo Kageki Revue Starlight
===================================================
This is the image base of the bangumi Shoujo Kageki Revue Starlight; we detected 55 characters and 3633 images in total. The full dataset is here.
Please note that these image bases are not guaranteed to be 100% clean; they may contain noisy samples. If you intend to manually train models using this dataset, we recommend performing the necessary preprocessing on the downloaded dataset to eliminate potential noisy samples (approximately 1% probability).
Here is the characters' preview:
| []
| [
"TAGS\n#size_categories-1K<n<10K #license-mit #art #region-us \n"
]
| [
25
]
| [
"passage: TAGS\n#size_categories-1K<n<10K #license-mit #art #region-us \n"
]
|
94b823e2f10583becdb29edf5fff8087b502c53d | # Dataset Card for "agent_action_small_24_class"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | Raihan004/agent_action_small_24_class | [
"region:us"
]
| 2023-11-12T14:42:14+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "\u0995\u09c1\u0995\u09c1\u09b0_\u0995\u09ae\u09cd\u09aa\u09bf\u0989\u099f\u09be\u09b0_\u09ac\u09cd\u09af\u09ac\u09b9\u09be\u09b0_\u0995\u09b0\u09be", "1": "\u0995\u09c1\u0995\u09c1\u09b0_\u0996\u09be\u0993\u09af\u09bc\u09be", "2": "\u0995\u09c1\u0995\u09c1\u09b0_\u0996\u09c7\u09b2\u09be_\u0995\u09b0\u09be", "3": "\u0995\u09c1\u0995\u09c1\u09b0_\u0998\u09c1\u09ae\u09be\u09a8\u09c7\u09be", "4": "\u0995\u09c1\u0995\u09c1\u09b0_\u09aa\u09a1\u09bc\u09be", "5": "\u0995\u09c1\u0995\u09c1\u09b0_\u09aa\u09be\u09a8_\u0995\u09b0\u09be", "6": "\u099b\u09c7\u09b2\u09c7_\u0995\u09ae\u09cd\u09aa\u09bf\u0989\u099f\u09be\u09b0_\u09ac\u09cd\u09af\u09ac\u09b9\u09be\u09b0_\u0995\u09b0\u09be", "7": "\u099b\u09c7\u09b2\u09c7_\u0996\u09be\u0993\u09af\u09bc\u09be", "8": "\u099b\u09c7\u09b2\u09c7_\u0996\u09c7\u09b2\u09be_\u0995\u09b0\u09be", "9": "\u099b\u09c7\u09b2\u09c7_\u0998\u09c1\u09ae\u09be\u09a8\u09c7\u09be", "10": "\u099b\u09c7\u09b2\u09c7_\u09aa\u09a1\u09bc\u09be", "11": "\u099b\u09c7\u09b2\u09c7_\u09aa\u09be\u09a8_\u0995\u09b0\u09be", "12": "\u09ac\u09bf\u09a1\u09bc\u09be\u09b2_\u0995\u09ae\u09cd\u09aa\u09bf\u0989\u099f\u09be\u09b0_\u09ac\u09cd\u09af\u09ac\u09b9\u09be\u09b0_\u0995\u09b0\u09be", "13": "\u09ac\u09bf\u09a1\u09bc\u09be\u09b2_\u0996\u09be\u0993\u09af\u09bc\u09be", "14": "\u09ac\u09bf\u09a1\u09bc\u09be\u09b2_\u0996\u09c7\u09b2\u09be_\u0995\u09b0\u09be", "15": "\u09ac\u09bf\u09a1\u09bc\u09be\u09b2_\u0998\u09c1\u09ae\u09be\u09a8\u09c7\u09be", "16": "\u09ac\u09bf\u09a1\u09bc\u09be\u09b2_\u09aa\u09a1\u09bc\u09be", "17": "\u09ac\u09bf\u09a1\u09bc\u09be\u09b2_\u09aa\u09be\u09a8_\u0995\u09b0\u09be", "18": "\u09ae\u09c7\u09af\u09bc\u09c7_\u0995\u09ae\u09cd\u09aa\u09bf\u0989\u099f\u09be\u09b0_\u09ac\u09cd\u09af\u09ac\u09b9\u09be\u09b0_\u0995\u09b0\u09be", "19": "\u09ae\u09c7\u09af\u09bc\u09c7_\u0996\u09be\u0993\u09af\u09bc\u09be", "20": "\u09ae\u09c7\u09af\u09bc\u09c7_\u0996\u09c7\u09b2\u09be_\u0995\u09b0\u09be", "21": "\u09ae\u09c7\u09af\u09bc\u09c7_\u0998\u09c1\u09ae\u09be\u09a8\u09c7\u09be", "22": "\u09ae\u09c7\u09af\u09bc\u09c7_\u09aa\u09a1\u09bc\u09be", "23": "\u09ae\u09c7\u09af\u09bc\u09c7_\u09aa\u09be\u09a8_\u0995\u09b0\u09be"}}}}], "splits": [{"name": "train", "num_bytes": 250876999.3463382, "num_examples": 2855}, {"name": "test", "num_bytes": 39709633.3036618, "num_examples": 504}], "download_size": 290692368, "dataset_size": 290586632.65000004}} | 2023-11-12T15:15:15+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "agent_action_small_24_class"
More Information needed | [
"# Dataset Card for \"agent_action_small_24_class\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"agent_action_small_24_class\"\n\nMore Information needed"
]
| [
6,
20
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"agent_action_small_24_class\"\n\nMore Information needed"
]
|
761a1d2c173d59bbd05dfdbd611a7d2e9c7041fd | # Dataset Card for "pringles-blip-captions"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | Norod78/pringles-blip-captions | [
"region:us"
]
| 2023-11-12T14:57:18+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 221312765.0, "num_examples": 138}], "download_size": 221194479, "dataset_size": 221312765.0}} | 2023-11-12T18:36:09+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "pringles-blip-captions"
More Information needed | [
"# Dataset Card for \"pringles-blip-captions\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"pringles-blip-captions\"\n\nMore Information needed"
]
| [
6,
19
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"pringles-blip-captions\"\n\nMore Information needed"
]
|
f3030a54810d678bcf07bbf49210a807ef356014 | # Dataset Card for "text-guided-vc-google-tts-api-speech_tokenizer"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | hhhaaahhhaa/text-guided-vc-google-tts-api-speech_tokenizer | [
"region:us"
]
| 2023-11-12T15:06:00+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "file_id", "dtype": "string"}, {"name": "instruction", "dtype": "string"}, {"name": "transcription", "dtype": "string"}, {"name": "src_speech_tokenizer_0", "sequence": "int64"}, {"name": "src_speech_tokenizer_1", "sequence": "int64"}, {"name": "src_speech_tokenizer_2", "sequence": "int64"}, {"name": "src_speech_tokenizer_3", "sequence": "int64"}, {"name": "src_speech_tokenizer_4", "sequence": "int64"}, {"name": "src_speech_tokenizer_5", "sequence": "int64"}, {"name": "src_speech_tokenizer_6", "sequence": "int64"}, {"name": "src_speech_tokenizer_7", "sequence": "int64"}, {"name": "tgt_speech_tokenizer_0", "sequence": "int64"}, {"name": "tgt_speech_tokenizer_1", "sequence": "int64"}, {"name": "tgt_speech_tokenizer_2", "sequence": "int64"}, {"name": "tgt_speech_tokenizer_3", "sequence": "int64"}, {"name": "tgt_speech_tokenizer_4", "sequence": "int64"}, {"name": "tgt_speech_tokenizer_5", "sequence": "int64"}, {"name": "tgt_speech_tokenizer_6", "sequence": "int64"}, {"name": "tgt_speech_tokenizer_7", "sequence": "int64"}], "splits": [{"name": "train", "num_bytes": 2476215704, "num_examples": 90000}, {"name": "validation", "num_bytes": 135757316, "num_examples": 5000}, {"name": "test", "num_bytes": 139761511, "num_examples": 5000}], "download_size": 147633674, "dataset_size": 2751734531}} | 2023-11-12T17:08:03+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "text-guided-vc-google-tts-api-speech_tokenizer"
More Information needed | [
"# Dataset Card for \"text-guided-vc-google-tts-api-speech_tokenizer\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"text-guided-vc-google-tts-api-speech_tokenizer\"\n\nMore Information needed"
]
| [
6,
30
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"text-guided-vc-google-tts-api-speech_tokenizer\"\n\nMore Information needed"
]
|
b06c05ad585863e632ffe600fe051dea1eb7c1e8 | # Dataset Card for "typed_final_chart_to_table"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | nourheshamshaheen/typed_final_chart_to_table | [
"region:us"
]
| 2023-11-12T15:38:39+00:00 | {"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "text", "dtype": "string"}, {"name": "type", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 25058975.0, "num_examples": 562}, {"name": "train", "num_bytes": 101059498.385, "num_examples": 2245}], "download_size": 108892387, "dataset_size": 126118473.385}, "configs": [{"config_name": "default", "data_files": [{"split": "test", "path": "data/test-*"}, {"split": "train", "path": "data/train-*"}]}]} | 2023-11-12T15:42:12+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "typed_final_chart_to_table"
More Information needed | [
"# Dataset Card for \"typed_final_chart_to_table\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"typed_final_chart_to_table\"\n\nMore Information needed"
]
| [
6,
21
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"typed_final_chart_to_table\"\n\nMore Information needed"
]
|
23dec33cbd5b28836c25d1b785bf00f108a8ab63 | This <span style="color:teal;">parallel corpus</span> contains <span style="color:teal;">26240</span> aligned <span style="color:teal;">Nande-French</span> sentence pairs in a <span style="color:teal;">90:10</span> train/test split. It has mainly been used to fine-tune the <span style="color:teal;">t5-base</span> pretrained model for the development of <a href="https://huggingface.co/SalomonMetre13/nnd_fr_mt_v3" style="color:green;">this translation model</a>. | SalomonMetre13/nnd_fr_26k | [
"task_categories:translation",
"size_categories:10K<n<100K",
"language:nnd",
"license:mit",
"region:us"
]
| 2023-11-12T15:40:13+00:00 | {"language": ["nnd"], "license": "mit", "size_categories": ["10K<n<100K"], "task_categories": ["translation"]} | 2023-11-20T09:08:05+00:00 | []
| [
"nnd"
]
| TAGS
#task_categories-translation #size_categories-10K<n<100K #language-West Ambae #license-mit #region-us
| This <span style="color:teal;">parallel corpus</span> contains <span style="color:teal;">26240</span> aligned <span style="color:teal;">Nande-French</span> sentence pairs in a <span style="color:teal;">90:10</span> train/test split. It has mainly been used to fine-tune the <span style="color:teal;">t5-base</span> pretrained model for the development of <a href="URL" style="color:green;">this translation model</a>. | []
| [
"TAGS\n#task_categories-translation #size_categories-10K<n<100K #language-West Ambae #license-mit #region-us \n"
]
| [
39
]
| [
"passage: TAGS\n#task_categories-translation #size_categories-10K<n<100K #language-West Ambae #license-mit #region-us \n"
]
|
70751ac7985317c2fcbbac9d6a134a20d4ecfeea | # Dataset Card for "vietnamese_cultural"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | nguyenth1312/vietnamese_cultural | [
"region:us"
]
| 2023-11-12T15:40:54+00:00 | {"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "Unnamed: 0", "dtype": "int64"}, {"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 423279027.0, "num_examples": 144}], "download_size": 371389185, "dataset_size": 423279027.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2023-11-12T15:41:24+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "vietnamese_cultural"
More Information needed | [
"# Dataset Card for \"vietnamese_cultural\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"vietnamese_cultural\"\n\nMore Information needed"
]
| [
6,
15
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"vietnamese_cultural\"\n\nMore Information needed"
]
|
08f4e743dbf9b2d2a1557c5e0570f4e42850d6b1 | # Dataset Card for "final_chart_to_table"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | nourheshamshaheen/final_chart_to_table | [
"region:us"
]
| 2023-11-12T15:42:27+00:00 | {"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "text", "dtype": "string"}, {"name": "type", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 101059151.385, "num_examples": 2245}, {"name": "test", "num_bytes": 25058843.0, "num_examples": 562}], "download_size": 108890579, "dataset_size": 126117994.385}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}]} | 2023-11-12T15:42:54+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "final_chart_to_table"
More Information needed | [
"# Dataset Card for \"final_chart_to_table\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"final_chart_to_table\"\n\nMore Information needed"
]
| [
6,
18
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"final_chart_to_table\"\n\nMore Information needed"
]
|
9e41180669ccc7aa070c8d8adc0238ab3fc55451 | # Dataset Card for "qasimplesi"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | zaanind/qasimplesi | [
"region:us"
]
| 2023-11-12T15:46:17+00:00 | {"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 17208, "num_examples": 28}], "download_size": 9011, "dataset_size": 17208}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2023-11-12T15:46:19+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "qasimplesi"
More Information needed | [
"# Dataset Card for \"qasimplesi\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"qasimplesi\"\n\nMore Information needed"
]
| [
6,
13
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"qasimplesi\"\n\nMore Information needed"
]
|
f0d57f60e743f0a92e37ba8f1d362d7b76fe7f6c | # Dataset Card for "bw_spec_cls_4_00_noise_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_4_00_noise_200 | [
"region:us"
]
| 2023-11-12T15:47:51+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "10", "1": "140", "2": "2", "3": "5"}}}}], "splits": [{"name": "train", "num_bytes": 44730986.0, "num_examples": 800}, {"name": "test", "num_bytes": 1122375.0, "num_examples": 20}], "download_size": 24737574, "dataset_size": 45853361.0}} | 2023-11-12T15:47:56+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_4_00_noise_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_4_00_noise_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_4_00_noise_200\"\n\nMore Information needed"
]
| [
6,
25
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_4_00_noise_200\"\n\nMore Information needed"
]
|
b4d62f5a7d4e20d7b2a887c2d7d49772e08ca18c | 
# Dataset Card for dataforge-economics
## Table of Contents
- [Overview](#overview)
- [Dataset Description](#dataset-description)
- [Data Collection and Synthesis](#data-collection-and-synthesis)
- [Data Structure](#data-structure)
- [Licensing, Privacy, and Ethics](#licensing-privacy-and-ethics)
- [Access](#access)
- [Usage](#usage)
- [Citation](#citation)
- [Contributions](#contributions)
## Overview
This dataset, `teknium/dataforge-economics`, is a specialized collection of 1,000 synthetic examples in the field of economics. It has been generated using OpenAI's GPT-4 and a custom data synthesis pipeline named DataForge, developed by me.
## Dataset Description
### Data Collection and Synthesis
The data in `teknium/dataforge-economics` has been synthetically generated using OpenAI's GPT-4 language model. The synthesis process was enhanced and structured using the DataForge pipeline, which incorporates domain-specific knowledge and ensures relevance in economics topics.
### Data Structure
- **Size of dataset:** 1000 examples
- **Type of data:** Textual (Economics domain-specific)
- **Data format:** JSON
- **Fields:**
  - id: a randomly generated uuid
  - conversations: single turn human & gpt turns in sharegpt format
  - source: the dataset name itself, for metadata purposes when merging with others
  - topic: the sub-topic for the domain
  - system_prompt: type of system prompt used for generating the response.
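For illustration only (not part of the original card): a short snippet that loads the data with the standard Hugging Face `datasets` library and prints one record. The field names are those listed above; the ShareGPT turn keys (`from`, `value`) follow the usual convention and are an assumption here.

```python
# Illustrative sketch: inspect one record to confirm the schema described above.
# Field names come from this card; the ShareGPT keys ("from"/"value") are assumed.
from datasets import load_dataset

ds = load_dataset("teknium/dataforge-economics", split="train")
example = ds[0]
print(sorted(example.keys()))  # expected: conversations, id, source, system_prompt, topic
for turn in example["conversations"]:
    print(turn["from"], "->", turn["value"][:80])
```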
## Licensing, Privacy, and Ethics
- **License:** MIT License
- **Special Considerations:** This dataset is purely generated from GPT-4 data; some information may be incorrect or invalid.
- **Privacy:** As the dataset is synthetically generated, it does not contain any real individual's data.
## Access
- **Availability:** General Access
## Usage
This is a domain-specialist dataset, the first to use my new pipeline, DataForge, which can create domain-expert knowledge (and tasks, as seen in the Trismegistus occult dataset).
This dataset was a proof of concept for improving an Orca model's economics expertise; when fine-tuned over Stable Beluga, the resulting model surpassed my custom economics benchmark.
| teknium/dataforge-economics | [
"language:eng",
"license:mit",
"economics",
"region:us"
]
| 2023-11-12T15:51:56+00:00 | {"language": ["eng"], "license": "mit", "pretty_name": "DataForge-Economics", "tags": ["economics"]} | 2023-11-12T23:39:30+00:00 | []
| [
"eng"
]
| TAGS
#language-English #license-mit #economics #region-us
| !image/png
# Dataset Card for dataforge-economics
## Table of Contents
- Overview
- Dataset Description
- Data Collection and Synthesis
- Data Structure
- Licensing, Privacy, and Ethics
- Access
- Usage
- Citation
- Contributions
## Overview
This dataset, 'teknium/dataforge-economics', is a specialized collection of 1,000 synthetic examples in the field of economics. It has been generated using OpenAI's GPT-4 and a custom data synthesis pipeline named DataForge, developed by me.
## Dataset Description
### Data Collection and Synthesis
The data in 'teknium/dataforge-economics' has been synthetically generated using OpenAI's GPT-4 language model. The synthesis process was enhanced and structured using the DataForge pipeline, which incorporates domain-specific knowledge and ensures relevance in economics topics.
### Data Structure
- Size of dataset: 1000 examples
- Type of data: Textual (Economics domain-specific)
- Data format: JSON
- Fields:
  - id: a randomly generated uuid
  - conversations: single turn human & gpt turns in sharegpt format
  - source: the dataset name itself, for metadata purposes when merging with others
  - topic: the sub-topic for the domain
  - system_prompt: type of system prompt used for generating the response.
## Licensing, Privacy, and Ethics
- License: MIT License
- Special Considerations: This dataset is purely generated from GPT-4 data; some information may be incorrect or invalid.
- Privacy: As the dataset is synthetically generated, it does not contain any real individual's data.
## Access
- Availability: General Access
## Usage
This is a domain-specialist dataset, the first to use my new pipeline, DataForge, which can create domain-expert knowledge (and tasks, as seen in the Trismegistus occult dataset).
This dataset was a proof of concept for improving an Orca model's economics expertise; when fine-tuned over Stable Beluga, the resulting model surpassed my custom economics benchmark.
| [
"# Dataset Card for dataforge-economics",
"## Table of Contents\n- Overview\n- Dataset Description\n - Data Collection and Synthesis\n - Data Structure\n- Licensing, Privacy, and Ethics\n- Access\n- Usage\n- Citation\n- Contributions",
"## Overview\n\nThis dataset, 'teknium/dataforge-economics', is a specialized collection of 1,000 synthetic examples in the field of economics. It has been generated using OpenAI's GPT-4 and a custom data synthesis pipeline named DataForge, developed by me.",
"## Dataset Description",
"### Data Collection and Synthesis\n\nThe data in 'teknium/dataforge-economics' has been synthetically generated using OpenAI's GPT-4 language model. The synthesis process was enhanced and structured using the DataForge pipeline, which incorporates domain-specific knowledge and ensures relevance in economics topics.",
"### Data Structure\n\n- Size of dataset: 1000 examples\n- Type of data: Textual (Economics domain-specific)\n- Data format: JSON\n- Fields: \n- - id: a randomly generated uuid\n - conversations: single turn human & gpt turns in sharegpt format\n - source: the dataset name itself, for metadata purposes when merging with others\n - topic: the sub-topic for the domain\n - system_prompt: type of system prompt used for generating the response.",
"## Licensing, Privacy, and Ethics\n\n- License: MIT License\n- Special Considerations: This datasest is purely generated from GPT-4 data, some information may be incorrect or invalid.\n- Privacy: As the dataset is synthetically generated, it does not contain any real individual's data.",
"## Access\n\n- Availability: General Access",
"## Usage\n\nThis dataset is a domain specialist dataset, the first to use my new pipeline called Data Forge, which can create domain expert knowledge (and tasks, as seen in the Trismegistus occult dataset)\nThis dataset was a proof of concept to improve upon Orca model's economics expertise, which surpassed my custom benchmark for economics when finetuned over stable beluga."
]
| [
"TAGS\n#language-English #license-mit #economics #region-us \n",
"# Dataset Card for dataforge-economics",
"## Table of Contents\n- Overview\n- Dataset Description\n - Data Collection and Synthesis\n - Data Structure\n- Licensing, Privacy, and Ethics\n- Access\n- Usage\n- Citation\n- Contributions",
"## Overview\n\nThis dataset, 'teknium/dataforge-economics', is a specialized collection of 1,000 synthetic examples in the field of economics. It has been generated using OpenAI's GPT-4 and a custom data synthesis pipeline named DataForge, developed by me.",
"## Dataset Description",
"### Data Collection and Synthesis\n\nThe data in 'teknium/dataforge-economics' has been synthetically generated using OpenAI's GPT-4 language model. The synthesis process was enhanced and structured using the DataForge pipeline, which incorporates domain-specific knowledge and ensures relevance in economics topics.",
"### Data Structure\n\n- Size of dataset: 1000 examples\n- Type of data: Textual (Economics domain-specific)\n- Data format: JSON\n- Fields: \n- - id: a randomly generated uuid\n - conversations: single turn human & gpt turns in sharegpt format\n - source: the dataset name itself, for metadata purposes when merging with others\n - topic: the sub-topic for the domain\n - system_prompt: type of system prompt used for generating the response.",
"## Licensing, Privacy, and Ethics\n\n- License: MIT License\n- Special Considerations: This datasest is purely generated from GPT-4 data, some information may be incorrect or invalid.\n- Privacy: As the dataset is synthetically generated, it does not contain any real individual's data.",
"## Access\n\n- Availability: General Access",
"## Usage\n\nThis dataset is a domain specialist dataset, the first to use my new pipeline called Data Forge, which can create domain expert knowledge (and tasks, as seen in the Trismegistus occult dataset)\nThis dataset was a proof of concept to improve upon Orca model's economics expertise, which surpassed my custom benchmark for economics when finetuned over stable beluga."
]
| [
18,
11,
47,
70,
4,
77,
115,
69,
9,
90
]
| [
"passage: TAGS\n#language-English #license-mit #economics #region-us \n# Dataset Card for dataforge-economics## Table of Contents\n- Overview\n- Dataset Description\n - Data Collection and Synthesis\n - Data Structure\n- Licensing, Privacy, and Ethics\n- Access\n- Usage\n- Citation\n- Contributions## Overview\n\nThis dataset, 'teknium/dataforge-economics', is a specialized collection of 1,000 synthetic examples in the field of economics. It has been generated using OpenAI's GPT-4 and a custom data synthesis pipeline named DataForge, developed by me.## Dataset Description### Data Collection and Synthesis\n\nThe data in 'teknium/dataforge-economics' has been synthetically generated using OpenAI's GPT-4 language model. The synthesis process was enhanced and structured using the DataForge pipeline, which incorporates domain-specific knowledge and ensures relevance in economics topics.### Data Structure\n\n- Size of dataset: 1000 examples\n- Type of data: Textual (Economics domain-specific)\n- Data format: JSON\n- Fields: \n- - id: a randomly generated uuid\n - conversations: single turn human & gpt turns in sharegpt format\n - source: the dataset name itself, for metadata purposes when merging with others\n - topic: the sub-topic for the domain\n - system_prompt: type of system prompt used for generating the response.## Licensing, Privacy, and Ethics\n\n- License: MIT License\n- Special Considerations: This datasest is purely generated from GPT-4 data, some information may be incorrect or invalid.\n- Privacy: As the dataset is synthetically generated, it does not contain any real individual's data.## Access\n\n- Availability: General Access"
]
|
c526528e42514cc5d152c09c3770f8df59bc56c1 | # Dataset Card for "bw_spec_cls_8_00_noise_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_8_00_noise_200 | [
"region:us"
]
| 2023-11-12T16:14:08+00:00 | {"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "10", "1": "140", "2": "2", "3": "5", "4": "141", "5": "190", "6": "193", "7": "194"}}}}], "splits": [{"name": "train", "num_bytes": 99107527.0, "num_examples": 1700}], "download_size": 51930526, "dataset_size": 99107527.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2023-11-12T16:14:16+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_8_00_noise_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_8_00_noise_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_8_00_noise_200\"\n\nMore Information needed"
]
| [
6,
25
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_8_00_noise_200\"\n\nMore Information needed"
]
|
ddb448f12cd7b52a417158286c2b9236063ee45b | # Dataset Card for "stripes"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | orgcatorg/stripes | [
"region:us"
]
| 2023-11-12T16:15:12+00:00 | {"configs": [{"config_name": "Africa", "data_files": [{"split": "train", "path": "Africa/train-*"}]}, {"config_name": "Asia-Pacific", "data_files": [{"split": "train", "path": "Asia-Pacific/train-*"}]}, {"config_name": "Europe", "data_files": [{"split": "train", "path": "Europe/train-*"}]}, {"config_name": "Middle East", "data_files": [{"split": "train", "path": "Middle East/train-*"}]}, {"config_name": "US", "data_files": [{"split": "train", "path": "US/train-*"}]}], "dataset_info": [{"config_name": "Africa", "features": [{"name": "content", "dtype": "string"}, {"name": "title", "dtype": "string"}, {"name": "source_link", "dtype": "string"}, {"name": "description", "dtype": "string"}, {"name": "date", "dtype": "string"}, {"name": "image", "dtype": "string"}, {"name": "image_caption", "dtype": "string"}, {"name": "category", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 583003, "num_examples": 119}], "download_size": 354654, "dataset_size": 583003}, {"config_name": "Asia-Pacific", "features": [{"name": "content", "dtype": "string"}, {"name": "title", "dtype": "string"}, {"name": "source_link", "dtype": "string"}, {"name": "description", "dtype": "string"}, {"name": "date", "dtype": "string"}, {"name": "image", "dtype": "string"}, {"name": "image_caption", "dtype": "string"}, {"name": "category", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 1720541, "num_examples": 395}], "download_size": 1017258, "dataset_size": 1720541}, {"config_name": "Europe", "features": [{"name": "content", "dtype": "string"}, {"name": "title", "dtype": "string"}, {"name": "source_link", "dtype": "string"}, {"name": "description", "dtype": "string"}, {"name": "date", "dtype": "string"}, {"name": "image", "dtype": "string"}, {"name": "image_caption", "dtype": "string"}, {"name": "category", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 3813635, "num_examples": 750}], "download_size": 2245719, "dataset_size": 3813635}, {"config_name": "Middle East", "features": [{"name": "content", "dtype": "string"}, {"name": "title", "dtype": "string"}, {"name": "source_link", "dtype": "string"}, {"name": "description", "dtype": "string"}, {"name": "date", "dtype": "string"}, {"name": "image", "dtype": "string"}, {"name": "image_caption", "dtype": "string"}, {"name": "category", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 4264778, "num_examples": 652}], "download_size": 2441348, "dataset_size": 4264778}, {"config_name": "US", "features": [{"name": "content", "dtype": "string"}, {"name": "title", "dtype": "string"}, {"name": "source_link", "dtype": "string"}, {"name": "description", "dtype": "string"}, {"name": "date", "dtype": "string"}, {"name": "image", "dtype": "string"}, {"name": "image_caption", "dtype": "string"}, {"name": "category", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 4426382, "num_examples": 777}], "download_size": 2629415, "dataset_size": 4426382}]} | 2024-02-17T04:20:43+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "stripes"
More Information needed | [
"# Dataset Card for \"stripes\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"stripes\"\n\nMore Information needed"
]
| [
6,
12
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"stripes\"\n\nMore Information needed"
]
|
a99716185b7a35ead1d86ede1ec181d136462fc2 | # Dataset Card for "bw_spec_cls_100_00_noise_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | arieg/bw_spec_cls_100_00_noise_200 | [
"region:us"
]
| 2023-11-12T16:21:05+00:00 | {"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "10", "1": "140", "2": "2", "3": "5", "4": "141", "5": "190", "6": "193", "7": "194", "8": "197", "9": "200", "10": "203", "11": "204", "12": "207", "13": "210", "14": "211", "15": "212", "16": "213", "17": "255", "18": "256", "19": "368", "20": "424", "21": "534", "22": "540", "23": "546", "24": "574", "25": "615", "26": "620", "27": "621", "28": "625", "29": "666", "30": "667", "31": "676", "32": "694", "33": "695", "34": "714", "35": "715", "36": "716", "37": "718", "38": "777", "39": "814", "40": "821", "41": "822", "42": "825", "43": "853", "44": "897", "45": "995", "46": "997", "47": "998", "48": "1039", "49": "1040", "50": "1082", "51": "1083", "52": "1102", "53": "1193", "54": "1195", "55": "1196", "56": "1197", "57": "1270", "58": "1276", "59": "1277", "60": "1278", "61": "1417", "62": "1427", "63": "1443", "64": "1482", "65": "1510", "66": "1544", "67": "1642", "68": "1644", "69": "1649", "70": "1661", "71": "1663", "72": "1666", "73": "1673", "74": "1680", "75": "1681", "76": "1682", "77": "1683", "78": "1684", "79": "1685", "80": "1686", "81": "1687", "82": "1688", "83": "1689", "84": "1701", "85": "1702", "86": "1703", "87": "1704", "88": "1706", "89": "1720", "90": "1732", "91": "1733", "92": "1735", "93": "1736", "94": "1883", "95": "1891", "96": "1924", "97": "1925", "98": "1929", "99": "1930"}}}}], "splits": [{"name": "train", "num_bytes": 1159801335.0, "num_examples": 20560}], "download_size": 603798465, "dataset_size": 1159801335.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2023-11-12T16:22:08+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "bw_spec_cls_100_00_noise_200"
More Information needed | [
"# Dataset Card for \"bw_spec_cls_100_00_noise_200\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"bw_spec_cls_100_00_noise_200\"\n\nMore Information needed"
]
| [
6,
25
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"bw_spec_cls_100_00_noise_200\"\n\nMore Information needed"
]
|
c627387165ffe9fd3d6e95e8f868a64d8b917cd3 | # Dataset Card for "commitpackmeta-gitmoji"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | 1aurent/commitpackmeta-gitmoji | [
"region:us"
]
| 2023-11-12T16:23:31+00:00 | {"dataset_info": {"features": [{"name": "text", "dtype": "string"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "\u00a9", "1": "\u00ae", "2": "\u203c", "3": "\u2049", "4": "\u2139", "5": "\u2194", "6": "\u2197", "7": "\u2199", "8": "\u21a9", "9": "\u231a", "10": "\u231b", "11": "\u2328", "12": "\u23e9", "13": "\u23ea", "14": "\u23eb", "15": "\u23ed", "16": "\u23ee", "17": "\u23f0", "18": "\u23f1", "19": "\u23f2", "20": "\u23f3", "21": "\u23f8", "22": "\u24c2", "23": "\u25b6", "24": "\u25c0", "25": "\u2600", "26": "\u2601", "27": "\u2602", "28": "\u2603", "29": "\u2604", "30": "\u260e", "31": "\u2611", "32": "\u2614", "33": "\u2615", "34": "\u261d", "35": "\u2620", "36": "\u2622", "37": "\u262e", "38": "\u262f", "39": "\u2639", "40": "\u263a", "41": "\u2649", "42": "\u264a", "43": "\u2653", "44": "\u2660", "45": "\u2663", "46": "\u2665", "47": "\u2666", "48": "\u2668", "49": "\u267b", "50": "\u267f", "51": "\u2692", "52": "\u2693", "53": "\u2694", "54": "\u2696", "55": "\u2697", "56": "\u2699", "57": "\u269b", "58": "\u269c", "59": "\u26a0", "60": "\u26a1", "61": "\u26aa", "62": "\u26ab", "63": "\u26b0", "64": "\u26bd", "65": "\u26be", "66": "\u26c4", "67": "\u26c5", "68": "\u26cf", "69": "\u26d1", "70": "\u26d3", "71": "\u26d4", "72": "\u26e9", "73": "\u26ea", "74": "\u26f1", "75": "\u26f2", "76": "\u26f3", "77": "\u26f4", "78": "\u26f5", "79": "\u26f7", "80": "\u26f8", "81": "\u26f9", "82": "\u26fa", "83": "\u26fd", "84": "\u2702", "85": "\u2705", "86": "\u2708", "87": "\u2709", "88": "\u270a", "89": "\u270b", "90": "\u270c", "91": "\u270d", "92": "\u270f", "93": "\u2712", "94": "\u2714", "95": "\u2716", "96": "\u271d", "97": "\u2728", "98": "\u2733", "99": "\u2734", "100": "\u2744", "101": "\u2747", "102": "\u274c", "103": "\u274e", "104": "\u2753", "105": "\u2757", "106": "\u2764", "107": "\u2795", "108": "\u2796", "109": "\u27a1", "110": "\u27b0", "111": "\u27bf", "112": "\u2934", "113": "\u2935", "114": "\u2b05", "115": "\u2b06", "116": "\u2b07", "117": "\u2b1b", "118": "\u2b1c", "119": "\u2b50", "120": "\u2b55", "121": "\u3030", "122": "\u303d", "123": "\ud83c\udccf", "124": "\ud83c\udd70", "125": "\ud83c\udd8e", "126": "\ud83c\udd91", "127": "\ud83c\udd92", "128": "\ud83c\udd93", "129": "\ud83c\udd95", "130": "\ud83c\udd99", "131": "\ud83c\udd9a", "132": "\ud83c\ude51", "133": "\ud83c\udf00", "134": "\ud83c\udf01", "135": "\ud83c\udf02", "136": "\ud83c\udf03", "137": "\ud83c\udf04", "138": "\ud83c\udf05", "139": "\ud83c\udf07", "140": "\ud83c\udf08", "141": "\ud83c\udf09", "142": "\ud83c\udf0a", "143": "\ud83c\udf0b", "144": "\ud83c\udf0c", "145": "\ud83c\udf0d", "146": "\ud83c\udf0e", "147": "\ud83c\udf0f", "148": "\ud83c\udf10", "149": "\ud83c\udf11", "150": "\ud83c\udf12", "151": "\ud83c\udf13", "152": "\ud83c\udf14", "153": "\ud83c\udf15", "154": "\ud83c\udf16", "155": "\ud83c\udf17", "156": "\ud83c\udf18", "157": "\ud83c\udf19", "158": "\ud83c\udf1a", "159": "\ud83c\udf1d", "160": "\ud83c\udf1e", "161": "\ud83c\udf1f", "162": "\ud83c\udf25", "163": "\ud83c\udf26", "164": "\ud83c\udf27", "165": "\ud83c\udf28", "166": "\ud83c\udf29", "167": "\ud83c\udf2a", "168": "\ud83c\udf2b", "169": "\ud83c\udf2d", "170": "\ud83c\udf2e", "171": "\ud83c\udf2f", "172": "\ud83c\udf30", "173": "\ud83c\udf31", "174": "\ud83c\udf32", "175": "\ud83c\udf33", "176": "\ud83c\udf34", "177": "\ud83c\udf35", "178": "\ud83c\udf36", "179": "\ud83c\udf37", "180": "\ud83c\udf38", "181": "\ud83c\udf39", "182": "\ud83c\udf3a", "183": "\ud83c\udf3b", "184": 
"\ud83c\udf3c", "185": "\ud83c\udf3d", "186": "\ud83c\udf3e", "187": "\ud83c\udf3f", "188": "\ud83c\udf40", "189": "\ud83c\udf41", "190": "\ud83c\udf42", "191": "\ud83c\udf43", "192": "\ud83c\udf44", "193": "\ud83c\udf45", "194": "\ud83c\udf46", "195": "\ud83c\udf47", "196": "\ud83c\udf48", "197": "\ud83c\udf49", "198": "\ud83c\udf4a", "199": "\ud83c\udf4b", "200": "\ud83c\udf4c", "201": "\ud83c\udf4d", "202": "\ud83c\udf4e", "203": "\ud83c\udf4f", "204": "\ud83c\udf50", "205": "\ud83c\udf51", "206": "\ud83c\udf52", "207": "\ud83c\udf53", "208": "\ud83c\udf54", "209": "\ud83c\udf55", "210": "\ud83c\udf56", "211": "\ud83c\udf57", "212": "\ud83c\udf58", "213": "\ud83c\udf59", "214": "\ud83c\udf5b", "215": "\ud83c\udf5c", "216": "\ud83c\udf5d", "217": "\ud83c\udf5e", "218": "\ud83c\udf5f", "219": "\ud83c\udf60", "220": "\ud83c\udf61", "221": "\ud83c\udf62", "222": "\ud83c\udf63", "223": "\ud83c\udf64", "224": "\ud83c\udf65", "225": "\ud83c\udf66", "226": "\ud83c\udf67", "227": "\ud83c\udf68", "228": "\ud83c\udf69", "229": "\ud83c\udf6a", "230": "\ud83c\udf6b", "231": "\ud83c\udf6c", "232": "\ud83c\udf6d", "233": "\ud83c\udf6e", "234": "\ud83c\udf6f", "235": "\ud83c\udf70", "236": "\ud83c\udf71", "237": "\ud83c\udf73", "238": "\ud83c\udf74", "239": "\ud83c\udf75", "240": "\ud83c\udf76", "241": "\ud83c\udf77", "242": "\ud83c\udf79", "243": "\ud83c\udf7a", "244": "\ud83c\udf7b", "245": "\ud83c\udf7c", "246": "\ud83c\udf7d", "247": "\ud83c\udf7e", "248": "\ud83c\udf7f", "249": "\ud83c\udf80", "250": "\ud83c\udf81", "251": "\ud83c\udf82", "252": "\ud83c\udf83", "253": "\ud83c\udf84", "254": "\ud83c\udf85", "255": "\ud83c\udf86", "256": "\ud83c\udf87", "257": "\ud83c\udf88", "258": "\ud83c\udf89", "259": "\ud83c\udf8a", "260": "\ud83c\udf8b", "261": "\ud83c\udf8c", "262": "\ud83c\udf90", "263": "\ud83c\udf92", "264": "\ud83c\udf93", "265": "\ud83c\udf96", "266": "\ud83c\udf97", "267": "\ud83c\udf99", "268": "\ud83c\udf9b", "269": "\ud83c\udf9e", "270": "\ud83c\udf9f", "271": "\ud83c\udfa0", "272": "\ud83c\udfa1", "273": "\ud83c\udfa2", "274": "\ud83c\udfa3", "275": "\ud83c\udfa4", "276": "\ud83c\udfa5", "277": "\ud83c\udfa6", "278": "\ud83c\udfa7", "279": "\ud83c\udfa8", "280": "\ud83c\udfa9", "281": "\ud83c\udfaa", "282": "\ud83c\udfab", "283": "\ud83c\udfac", "284": "\ud83c\udfad", "285": "\ud83c\udfae", "286": "\ud83c\udfaf", "287": "\ud83c\udfb0", "288": "\ud83c\udfb1", "289": "\ud83c\udfb2", "290": "\ud83c\udfb3", "291": "\ud83c\udfb4", "292": "\ud83c\udfb5", "293": "\ud83c\udfb6", "294": "\ud83c\udfb7", "295": "\ud83c\udfb8", "296": "\ud83c\udfb9", "297": "\ud83c\udfba", "298": "\ud83c\udfbb", "299": "\ud83c\udfbc", "300": "\ud83c\udfbd", "301": "\ud83c\udfbe", "302": "\ud83c\udfbf", "303": "\ud83c\udfc0", "304": "\ud83c\udfc1", "305": "\ud83c\udfc2", "306": "\ud83c\udfc3", "307": "\ud83c\udfc4", "308": "\ud83c\udfc5", "309": "\ud83c\udfc6", "310": "\ud83c\udfc7", "311": "\ud83c\udfc8", "312": "\ud83c\udfc9", "313": "\ud83c\udfca", "314": "\ud83c\udfcb", "315": "\ud83c\udfcc", "316": "\ud83c\udfce", "317": "\ud83c\udfcf", "318": "\ud83c\udfd0", "319": "\ud83c\udfd1", "320": "\ud83c\udfd3", "321": "\ud83c\udfd4", "322": "\ud83c\udfd6", "323": "\ud83c\udfd7", "324": "\ud83c\udfda", "325": "\ud83c\udfdb", "326": "\ud83c\udfdd", "327": "\ud83c\udfde", "328": "\ud83c\udfe0", "329": "\ud83c\udfe1", "330": "\ud83c\udfe2", "331": "\ud83c\udfe3", "332": "\ud83c\udfe4", "333": "\ud83c\udfe5", "334": "\ud83c\udfe6", "335": "\ud83c\udfe8", "336": "\ud83c\udfe9", "337": "\ud83c\udfea", "338": 
"\ud83c\udfeb", "339": "\ud83c\udfec", "340": "\ud83c\udfed", "341": "\ud83c\udfee", "342": "\ud83c\udfef", "343": "\ud83c\udff0", "344": "\ud83c\udff3", "345": "\ud83c\udff4", "346": "\ud83c\udff5", "347": "\ud83c\udff7", "348": "\ud83c\udff9", "349": "\ud83c\udffc", "350": "\ud83d\udc00", "351": "\ud83d\udc01", "352": "\ud83d\udc02", "353": "\ud83d\udc03", "354": "\ud83d\udc04", "355": "\ud83d\udc05", "356": "\ud83d\udc06", "357": "\ud83d\udc07", "358": "\ud83d\udc08", "359": "\ud83d\udc09", "360": "\ud83d\udc0a", "361": "\ud83d\udc0b", "362": "\ud83d\udc0c", "363": "\ud83d\udc0d", "364": "\ud83d\udc0e", "365": "\ud83d\udc0f", "366": "\ud83d\udc10", "367": "\ud83d\udc11", "368": "\ud83d\udc12", "369": "\ud83d\udc13", "370": "\ud83d\udc14", "371": "\ud83d\udc15", "372": "\ud83d\udc16", "373": "\ud83d\udc17", "374": "\ud83d\udc18", "375": "\ud83d\udc19", "376": "\ud83d\udc1a", "377": "\ud83d\udc1b", "378": "\ud83d\udc1c", "379": "\ud83d\udc1d", "380": "\ud83d\udc1e", "381": "\ud83d\udc1f", "382": "\ud83d\udc20", "383": "\ud83d\udc21", "384": "\ud83d\udc22", "385": "\ud83d\udc23", "386": "\ud83d\udc24", "387": "\ud83d\udc25", "388": "\ud83d\udc26", "389": "\ud83d\udc27", "390": "\ud83d\udc28", "391": "\ud83d\udc29", "392": "\ud83d\udc2a", "393": "\ud83d\udc2b", "394": "\ud83d\udc2c", "395": "\ud83d\udc2d", "396": "\ud83d\udc2e", "397": "\ud83d\udc2f", "398": "\ud83d\udc30", "399": "\ud83d\udc31", "400": "\ud83d\udc32", "401": "\ud83d\udc33", "402": "\ud83d\udc34", "403": "\ud83d\udc35", "404": "\ud83d\udc36", "405": "\ud83d\udc37", "406": "\ud83d\udc38", "407": "\ud83d\udc39", "408": "\ud83d\udc3a", "409": "\ud83d\udc3b", "410": "\ud83d\udc3c", "411": "\ud83d\udc3d", "412": "\ud83d\udc3e", "413": "\ud83d\udc3f", "414": "\ud83d\udc40", "415": "\ud83d\udc41", "416": "\ud83d\udc42", "417": "\ud83d\udc43", "418": "\ud83d\udc44", "419": "\ud83d\udc45", "420": "\ud83d\udc46", "421": "\ud83d\udc47", "422": "\ud83d\udc48", "423": "\ud83d\udc49", "424": "\ud83d\udc4a", "425": "\ud83d\udc4b", "426": "\ud83d\udc4c", "427": "\ud83d\udc4d", "428": "\ud83d\udc4e", "429": "\ud83d\udc4f", "430": "\ud83d\udc50", "431": "\ud83d\udc51", "432": "\ud83d\udc52", "433": "\ud83d\udc53", "434": "\ud83d\udc54", "435": "\ud83d\udc55", "436": "\ud83d\udc56", "437": "\ud83d\udc57", "438": "\ud83d\udc58", "439": "\ud83d\udc59", "440": "\ud83d\udc5a", "441": "\ud83d\udc5b", "442": "\ud83d\udc5c", "443": "\ud83d\udc5f", "444": "\ud83d\udc60", "445": "\ud83d\udc62", "446": "\ud83d\udc63", "447": "\ud83d\udc64", "448": "\ud83d\udc65", "449": "\ud83d\udc66", "450": "\ud83d\udc67", "451": "\ud83d\udc68", "452": "\ud83d\udc69", "453": "\ud83d\udc6a", "454": "\ud83d\udc6b", "455": "\ud83d\udc6c", "456": "\ud83d\udc6d", "457": "\ud83d\udc6e", "458": "\ud83d\udc6f", "459": "\ud83d\udc70", "460": "\ud83d\udc71", "461": "\ud83d\udc73", "462": "\ud83d\udc74", "463": "\ud83d\udc75", "464": "\ud83d\udc76", "465": "\ud83d\udc77", "466": "\ud83d\udc78", "467": "\ud83d\udc79", "468": "\ud83d\udc7b", "469": "\ud83d\udc7d", "470": "\ud83d\udc7e", "471": "\ud83d\udc80", "472": "\ud83d\udc81", "473": "\ud83d\udc82", "474": "\ud83d\udc83", "475": "\ud83d\udc84", "476": "\ud83d\udc85", "477": "\ud83d\udc86", "478": "\ud83d\udc87", "479": "\ud83d\udc88", "480": "\ud83d\udc89", "481": "\ud83d\udc8a", "482": "\ud83d\udc8b", "483": "\ud83d\udc8c", "484": "\ud83d\udc8d", "485": "\ud83d\udc8e", "486": "\ud83d\udc8f", "487": "\ud83d\udc90", "488": "\ud83d\udc91", "489": "\ud83d\udc92", "490": "\ud83d\udc93", "491": "\ud83d\udc94", "492": 
"\ud83d\udc95", "493": "\ud83d\udc96", "494": "\ud83d\udc98", "495": "\ud83d\udc99", "496": "\ud83d\udc9a", "497": "\ud83d\udc9b", "498": "\ud83d\udc9c", "499": "\ud83d\udc9d", "500": "\ud83d\udc9e", "501": "\ud83d\udc9f", "502": "\ud83d\udca1", "503": "\ud83d\udca2", "504": "\ud83d\udca3", "505": "\ud83d\udca5", "506": "\ud83d\udca6", "507": "\ud83d\udca7", "508": "\ud83d\udca8", "509": "\ud83d\udca9", "510": "\ud83d\udcaa", "511": "\ud83d\udcab", "512": "\ud83d\udcac", "513": "\ud83d\udcad", "514": "\ud83d\udcae", "515": "\ud83d\udcaf", "516": "\ud83d\udcb0", "517": "\ud83d\udcb1", "518": "\ud83d\udcb2", "519": "\ud83d\udcb3", "520": "\ud83d\udcb4", "521": "\ud83d\udcb5", "522": "\ud83d\udcb6", "523": "\ud83d\udcb7", "524": "\ud83d\udcb8", "525": "\ud83d\udcb9", "526": "\ud83d\udcba", "527": "\ud83d\udcbb", "528": "\ud83d\udcbc", "529": "\ud83d\udcbd", "530": "\ud83d\udcbe", "531": "\ud83d\udcbf", "532": "\ud83d\udcc0", "533": "\ud83d\udcc1", "534": "\ud83d\udcc2", "535": "\ud83d\udcc3", "536": "\ud83d\udcc4", "537": "\ud83d\udcc5", "538": "\ud83d\udcc6", "539": "\ud83d\udcc7", "540": "\ud83d\udcc8", "541": "\ud83d\udcc9", "542": "\ud83d\udcca", "543": "\ud83d\udccb", "544": "\ud83d\udccc", "545": "\ud83d\udccd", "546": "\ud83d\udcce", "547": "\ud83d\udccf", "548": "\ud83d\udcd0", "549": "\ud83d\udcd1", "550": "\ud83d\udcd2", "551": "\ud83d\udcd3", "552": "\ud83d\udcd4", "553": "\ud83d\udcd5", "554": "\ud83d\udcd6", "555": "\ud83d\udcd7", "556": "\ud83d\udcd8", "557": "\ud83d\udcd9", "558": "\ud83d\udcda", "559": "\ud83d\udcdb", "560": "\ud83d\udcdc", "561": "\ud83d\udcdd", "562": "\ud83d\udcde", "563": "\ud83d\udcdf", "564": "\ud83d\udce0", "565": "\ud83d\udce1", "566": "\ud83d\udce2", "567": "\ud83d\udce3", "568": "\ud83d\udce4", "569": "\ud83d\udce5", "570": "\ud83d\udce6", "571": "\ud83d\udce7", "572": "\ud83d\udce8", "573": "\ud83d\udce9", "574": "\ud83d\udceb", "575": "\ud83d\udcec", "576": "\ud83d\udced", "577": "\ud83d\udcee", "578": "\ud83d\udcef", "579": "\ud83d\udcf0", "580": "\ud83d\udcf1", "581": "\ud83d\udcf2", "582": "\ud83d\udcf3", "583": "\ud83d\udcf4", "584": "\ud83d\udcf5", "585": "\ud83d\udcf6", "586": "\ud83d\udcf7", "587": "\ud83d\udcf8", "588": "\ud83d\udcf9", "589": "\ud83d\udcfa", "590": "\ud83d\udcfb", "591": "\ud83d\udcfc", "592": "\ud83d\udcfd", "593": "\ud83d\udcff", "594": "\ud83d\udd00", "595": "\ud83d\udd01", "596": "\ud83d\udd02", "597": "\ud83d\udd03", "598": "\ud83d\udd04", "599": "\ud83d\udd07", "600": "\ud83d\udd08", "601": "\ud83d\udd0a", "602": "\ud83d\udd0b", "603": "\ud83d\udd0c", "604": "\ud83d\udd0d", "605": "\ud83d\udd0e", "606": "\ud83d\udd0f", "607": "\ud83d\udd10", "608": "\ud83d\udd11", "609": "\ud83d\udd12", "610": "\ud83d\udd13", "611": "\ud83d\udd14", "612": "\ud83d\udd15", "613": "\ud83d\udd16", "614": "\ud83d\udd17", "615": "\ud83d\udd18", "616": "\ud83d\udd19", "617": "\ud83d\udd1c", "618": "\ud83d\udd1d", "619": "\ud83d\udd1f", "620": "\ud83d\udd20", "621": "\ud83d\udd21", "622": "\ud83d\udd22", "623": "\ud83d\udd24", "624": "\ud83d\udd25", "625": "\ud83d\udd26", "626": "\ud83d\udd27", "627": "\ud83d\udd28", "628": "\ud83d\udd29", "629": "\ud83d\udd2a", "630": "\ud83d\udd2b", "631": "\ud83d\udd2c", "632": "\ud83d\udd2d", "633": "\ud83d\udd2e", "634": "\ud83d\udd30", "635": "\ud83d\udd32", "636": "\ud83d\udd33", "637": "\ud83d\udd34", "638": "\ud83d\udd35", "639": "\ud83d\udd36", "640": "\ud83d\udd37", "641": "\ud83d\udd38", "642": "\ud83d\udd39", "643": "\ud83d\udd3a", "644": "\ud83d\udd3c", "645": "\ud83d\udd4a", "646": 
"\ud83d\udd4b", "647": "\ud83d\udd4d", "648": "\ud83d\udd53", "649": "\ud83d\udd5c", "650": "\ud83d\udd64", "651": "\ud83d\udd6f", "652": "\ud83d\udd70", "653": "\ud83d\udd73", "654": "\ud83d\udd74", "655": "\ud83d\udd75", "656": "\ud83d\udd76", "657": "\ud83d\udd77", "658": "\ud83d\udd78", "659": "\ud83d\udd79", "660": "\ud83d\udd7a", "661": "\ud83d\udd87", "662": "\ud83d\udd8a", "663": "\ud83d\udd8b", "664": "\ud83d\udd8c", "665": "\ud83d\udd8d", "666": "\ud83d\udd90", "667": "\ud83d\udd95", "668": "\ud83d\udd96", "669": "\ud83d\udda4", "670": "\ud83d\udda5", "671": "\ud83d\udda8", "672": "\ud83d\uddb1", "673": "\ud83d\uddbc", "674": "\ud83d\uddc2", "675": "\ud83d\uddc3", "676": "\ud83d\uddc4", "677": "\ud83d\uddd1", "678": "\ud83d\uddd2", "679": "\ud83d\uddd3", "680": "\ud83d\udddc", "681": "\ud83d\udddd", "682": "\ud83d\uddde", "683": "\ud83d\udde1", "684": "\ud83d\udde3", "685": "\ud83d\uddef", "686": "\ud83d\uddfa", "687": "\ud83d\uddfb", "688": "\ud83d\uddfc", "689": "\ud83d\uddfd", "690": "\ud83d\uddfe", "691": "\ud83d\uddff", "692": "\ud83d\ude00", "693": "\ud83d\ude01", "694": "\ud83d\ude02", "695": "\ud83d\ude03", "696": "\ud83d\ude04", "697": "\ud83d\ude05", "698": "\ud83d\ude06", "699": "\ud83d\ude07", "700": "\ud83d\ude08", "701": "\ud83d\ude09", "702": "\ud83d\ude0a", "703": "\ud83d\ude0b", "704": "\ud83d\ude0c", "705": "\ud83d\ude0d", "706": "\ud83d\ude0e", "707": "\ud83d\ude0f", "708": "\ud83d\ude10", "709": "\ud83d\ude11", "710": "\ud83d\ude12", "711": "\ud83d\ude13", "712": "\ud83d\ude14", "713": "\ud83d\ude15", "714": "\ud83d\ude16", "715": "\ud83d\ude17", "716": "\ud83d\ude18", "717": "\ud83d\ude19", "718": "\ud83d\ude1a", "719": "\ud83d\ude1b", "720": "\ud83d\ude1c", "721": "\ud83d\ude1d", "722": "\ud83d\ude1e", "723": "\ud83d\ude1f", "724": "\ud83d\ude20", "725": "\ud83d\ude21", "726": "\ud83d\ude22", "727": "\ud83d\ude23", "728": "\ud83d\ude24", "729": "\ud83d\ude25", "730": "\ud83d\ude26", "731": "\ud83d\ude27", "732": "\ud83d\ude28", "733": "\ud83d\ude29", "734": "\ud83d\ude2a", "735": "\ud83d\ude2b", "736": "\ud83d\ude2c", "737": "\ud83d\ude2d", "738": "\ud83d\ude2e", "739": "\ud83d\ude2f", "740": "\ud83d\ude30", "741": "\ud83d\ude31", "742": "\ud83d\ude32", "743": "\ud83d\ude33", "744": "\ud83d\ude34", "745": "\ud83d\ude35", "746": "\ud83d\ude36", "747": "\ud83d\ude37", "748": "\ud83d\ude38", "749": "\ud83d\ude39", "750": "\ud83d\ude3a", "751": "\ud83d\ude3b", "752": "\ud83d\ude3c", "753": "\ud83d\ude3d", "754": "\ud83d\ude3e", "755": "\ud83d\ude3f", "756": "\ud83d\ude40", "757": "\ud83d\ude41", "758": "\ud83d\ude42", "759": "\ud83d\ude43", "760": "\ud83d\ude44", "761": "\ud83d\ude45", "762": "\ud83d\ude46", "763": "\ud83d\ude47", "764": "\ud83d\ude48", "765": "\ud83d\ude49", "766": "\ud83d\ude4a", "767": "\ud83d\ude4b", "768": "\ud83d\ude4c", "769": "\ud83d\ude4d", "770": "\ud83d\ude4e", "771": "\ud83d\ude4f", "772": "\ud83d\ude80", "773": "\ud83d\ude81", "774": "\ud83d\ude82", "775": "\ud83d\ude83", "776": "\ud83d\ude84", "777": "\ud83d\ude85", "778": "\ud83d\ude86", "779": "\ud83d\ude87", "780": "\ud83d\ude88", "781": "\ud83d\ude89", "782": "\ud83d\ude8a", "783": "\ud83d\ude8b", "784": "\ud83d\ude8c", "785": "\ud83d\ude8d", "786": "\ud83d\ude8e", "787": "\ud83d\ude8f", "788": "\ud83d\ude90", "789": "\ud83d\ude91", "790": "\ud83d\ude92", "791": "\ud83d\ude93", "792": "\ud83d\ude94", "793": "\ud83d\ude95", "794": "\ud83d\ude96", "795": "\ud83d\ude97", "796": "\ud83d\ude98", "797": "\ud83d\ude99", "798": "\ud83d\ude9a", "799": "\ud83d\ude9b", "800": 
"\ud83d\ude9c", "801": "\ud83d\ude9d", "802": "\ud83d\ude9e", "803": "\ud83d\ude9f", "804": "\ud83d\udea0", "805": "\ud83d\udea1", "806": "\ud83d\udea2", "807": "\ud83d\udea3", "808": "\ud83d\udea4", "809": "\ud83d\udea5", "810": "\ud83d\udea6", "811": "\ud83d\udea7", "812": "\ud83d\udea8", "813": "\ud83d\udea9", "814": "\ud83d\udeaa", "815": "\ud83d\udeab", "816": "\ud83d\udead", "817": "\ud83d\udeae", "818": "\ud83d\udeb0", "819": "\ud83d\udeb1", "820": "\ud83d\udeb2", "821": "\ud83d\udeb3", "822": "\ud83d\udeb4", "823": "\ud83d\udeb5", "824": "\ud83d\udeb6", "825": "\ud83d\udeb7", "826": "\ud83d\udeb8", "827": "\ud83d\udebb", "828": "\ud83d\udebc", "829": "\ud83d\udebd", "830": "\ud83d\udebf", "831": "\ud83d\udec0", "832": "\ud83d\udec1", "833": "\ud83d\udec2", "834": "\ud83d\udec3", "835": "\ud83d\udec4", "836": "\ud83d\udec5", "837": "\ud83d\udecb", "838": "\ud83d\udece", "839": "\ud83d\uded1", "840": "\ud83d\udee0", "841": "\ud83d\udee1", "842": "\ud83d\udee2", "843": "\ud83d\udee3", "844": "\ud83d\udee5", "845": "\ud83d\udee9", "846": "\ud83d\udeeb", "847": "\ud83d\udeec", "848": "\ud83d\udef0", "849": "\ud83d\udef3", "850": "\ud83d\udef4", "851": "\ud83d\udef5", "852": "\ud83d\udef6", "853": "\ud83d\udef7", "854": "\ud83d\udef9", "855": "\ud83d\udfe2", "856": "\ud83d\udfe5", "857": "\ud83d\udfe7", "858": "\ud83e\udd0f", "859": "\ud83e\udd10", "860": "\ud83e\udd11", "861": "\ud83e\udd13", "862": "\ud83e\udd14", "863": "\ud83e\udd15", "864": "\ud83e\udd16", "865": "\ud83e\udd18", "866": "\ud83e\udd19", "867": "\ud83e\udd1c", "868": "\ud83e\udd1d", "869": "\ud83e\udd1e", "870": "\ud83e\udd20", "871": "\ud83e\udd21", "872": "\ud83e\udd22", "873": "\ud83e\udd23", "874": "\ud83e\udd26", "875": "\ud83e\udd27", "876": "\ud83e\udd29", "877": "\ud83e\udd2a", "878": "\ud83e\udd2b", "879": "\ud83e\udd2c", "880": "\ud83e\udd2d", "881": "\ud83e\udd2e", "882": "\ud83e\udd2f", "883": "\ud83e\udd33", "884": "\ud83e\udd35", "885": "\ud83e\udd37", "886": "\ud83e\udd38", "887": "\ud83e\udd39", "888": "\ud83e\udd3a", "889": "\ud83e\udd3c", "890": "\ud83e\udd3e", "891": "\ud83e\udd41", "892": "\ud83e\udd42", "893": "\ud83e\udd45", "894": "\ud83e\udd47", "895": "\ud83e\udd48", "896": "\ud83e\udd51", "897": "\ud83e\udd55", "898": "\ud83e\udd57", "899": "\ud83e\udd58", "900": "\ud83e\udd5a", "901": "\ud83e\udd5d", "902": "\ud83e\udd5e", "903": "\ud83e\udd5f", "904": "\ud83e\udd60", "905": "\ud83e\udd64", "906": "\ud83e\udd65", "907": "\ud83e\udd68", "908": "\ud83e\udd69", "909": "\ud83e\udd6d", "910": "\ud83e\udd6f", "911": "\ud83e\udd73", "912": "\ud83e\udd74", "913": "\ud83e\udd75", "914": "\ud83e\udd76", "915": "\ud83e\udd78", "916": "\ud83e\udd79", "917": "\ud83e\udd7a", "918": "\ud83e\udd7d", "919": "\ud83e\udd7e", "920": "\ud83e\udd80", "921": "\ud83e\udd81", "922": "\ud83e\udd82", "923": "\ud83e\udd83", "924": "\ud83e\udd84", "925": "\ud83e\udd85", "926": "\ud83e\udd86", "927": "\ud83e\udd87", "928": "\ud83e\udd88", "929": "\ud83e\udd89", "930": "\ud83e\udd8a", "931": "\ud83e\udd8b", "932": "\ud83e\udd8d", "933": "\ud83e\udd8e", "934": "\ud83e\udd8f", "935": "\ud83e\udd90", "936": "\ud83e\udd95", "937": "\ud83e\udd96", "938": "\ud83e\udd99", "939": "\ud83e\udd9c", "940": "\ud83e\udd9f", "941": "\ud83e\uddad", "942": "\ud83e\uddba", "943": "\ud83e\uddbd", "944": "\ud83e\uddc0", "945": "\ud83e\uddc3", "946": "\ud83e\uddca", "947": "\ud83e\uddcc", "948": "\ud83e\uddcd", "949": "\ud83e\uddce", "950": "\ud83e\uddcf", "951": "\ud83e\uddd0", "952": "\ud83e\uddd1", "953": "\ud83e\uddd2", "954": 
"\ud83e\uddd4", "955": "\ud83e\uddd6", "956": "\ud83e\uddd7", "957": "\ud83e\uddd8", "958": "\ud83e\uddd9", "959": "\ud83e\udddc", "960": "\ud83e\udddd", "961": "\ud83e\uddde", "962": "\ud83e\udddf", "963": "\ud83e\udde0", "964": "\ud83e\udde2", "965": "\ud83e\udde4", "966": "\ud83e\udde6", "967": "\ud83e\udde7", "968": "\ud83e\uddea", "969": "\ud83e\uddeb", "970": "\ud83e\uddef", "971": "\ud83e\uddf0", "972": "\ud83e\uddf9", "973": "\ud83e\uddfc", "974": "\ud83e\uddfd", "975": "\ud83e\uddfe", "976": "\ud83e\ude73", "977": "\ud83e\ude79", "978": "\ud83e\ude81", "979": "\ud83e\ude90", "980": "\ud83e\ude93", "981": "\ud83e\ude9c", "982": "\ud83e\udea3", "983": "\ud83e\udea6", "984": "\ud83e\udeb2"}}}}], "splits": [{"name": "train", "num_bytes": 3998825, "num_examples": 89894}], "download_size": 2678056, "dataset_size": 3998825}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2023-11-12T16:23:33+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "commitpackmeta-gitmoji"
More Information needed | [
"# Dataset Card for \"commitpackmeta-gitmoji\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"commitpackmeta-gitmoji\"\n\nMore Information needed"
]
| [
6,
17
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"commitpackmeta-gitmoji\"\n\nMore Information needed"
]
|
ae70de0d598bc0d51c8ba2996d0150f4938caae5 |

# Dataset Card for Dataset Name
This is the dataset that made OpenHermes 2.5 and Nous Hermes 2 series of models.
Support me on GitHub sponsors <3 : https://github.com/sponsors/teknium1
## Dataset Details
### Dataset Description
The Open Hermes 2/2.5 and Nous Hermes 2 models have made significant advancements in SOTA LLMs over recent months, and are underpinned by this exact compilation and curation of many open-source datasets and custom-created synthetic datasets.
The Open Hermes 2.5 dataset is a continuation of the Open Hermes 1 dataset, at a much larger scale: a much more diverse and much higher quality compilation, reaching 1M primarily synthetically generated instruction and chat samples.
## Lilac Integration
This dataset has been pushed to Lilac's (a data curation and exploration platform) live HuggingFace space, which hosts many popular open-source datasets for exploration and curation and supports text-embedding searches and clustering of those datasets.
Check it out here: https://lilacai-lilac.hf.space/datasets#lilac/OpenHermes-2.5
## Dataset Sources
### Airoboros 2.2
By Jon Durbin: https://huggingface.co/datasets/jondurbin/airoboros-2.2
### CamelAI Domain Expert Datasets (Physics, Math, Chemistry & Biology)
By CamelAI: https://huggingface.co/camel-ai
### ChatBot Arena (GPT-4 Only)
By LMSys: https://huggingface.co/datasets/lmsys/lmsys-chat-1m
### Collective Cognition (09-11-2023)
By Teknium: https://huggingface.co/datasets/CollectiveCognition/chats-data-2023-09-22
### CoT Alpaca GPT4
I have lost the source page for this dataset, sorry
### Evol Instruct 70K && 140K
By WizardLM:
70K: https://huggingface.co/datasets/WizardLM/WizardLM_evol_instruct_70k
140k: https://huggingface.co/datasets/WizardLM/WizardLM_evol_instruct_V2_196k
### Glaive Code Assistant
By Sahil & Glaive: https://huggingface.co/datasets/glaiveai/glaive-code-assistant
### GPT4-LLM
By Baolin Peng*, Chunyuan Li*, Pengcheng He*, Michel Galley, Jianfeng Gao
https://huggingface.co/datasets/teknium/GPT4-LLM-Cleaned
### GPTeacher
By Teknium & Kuruminha: https://github.com/teknium1/GPTeacher
### Medical Tasks
By CogStack: https://github.com/CogStack/OpenGPT
### MetaMath 40k
By MetaMath: https://huggingface.co/datasets/meta-math/MetaMathQA
### SlimOrca 550K
By Wing Lian and Guan Wang and Bleys Goodson and Eugene Pentland and Austin Cook and Chanvichet Vong and Teknium:
https://huggingface.co/datasets/Open-Orca/SlimOrca
### Platypus
By Garage-bAInd: https://huggingface.co/datasets/garage-bAInd/Open-Platypus
### ShareGPT (GPT4-Only)
By LMSys: https://huggingface.co/datasets/anon8231489123/ShareGPT_Vicuna_unfiltered
### Unnatural Instructions GPT4
By Baolin Peng*, Chunyuan Li*, Pengcheng He*, Michel Galley, Jianfeng Gao:
https://github.com/Instruction-Tuning-with-GPT-4/GPT-4-LLM
## Dataset Structure
The dataset follows a sharegpt structure. This means it is a list of dictionaries, with each dictionary containing a new list of dicts called conversations. Each turn in a conversation has two dictionaries, a "from" field, which denotes the role of that turn, and a "value" field which contains the actual text.
Here is an example of an entry:
```
{
"conversations": [
{
"from": "system",
"value": ""
},
{
"from": "human",
"value": "Every day, a tree drops 7 leaves. How many leaves would it drop in a month of February in a non-leap year? Include your logic."
},
{
"from": "gpt",
"value": "Here's the logic behind this:\n\n1. We know that February has 28 days in a non-leap year.\n2. If the tree drops 7 leaves every day, then over the course of February, it would drop:\n Leaves dropped in February = Leaves per day * Days in February\n = 7 leaves * 28 days\n = 196 leaves\n\nSo, the tree would drop 196 leaves in February in a non-leap year."
}
],
"source": "airoboros2.2",
"category": "orca"
},
```
Some examples, like the one above, contain the metadata that came with the dataset, such as "category" being "orca", and many contain the source dataset that it was curated from.
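For quick inspection, here is a minimal loading sketch (assuming the standard `datasets` library and the repository id from the citation below; field names follow the entry format shown above):

```python
from datasets import load_dataset

# The compilation ships as a single "train" split.
ds = load_dataset("teknium/OpenHermes-2.5", split="train")

example = ds[0]
# Each example holds a list of turns; every turn has a "from" role and a "value" text.
for turn in example["conversations"]:
    print(f'{turn["from"]}: {turn["value"][:80]}')

# Provenance metadata ("source", "category") is present on many, but not all, examples.
print("source:", example.get("source"), "| category:", example.get("category"))
```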
## Citation
```bibtex
@misc{OpenHermes2.5,
title = {OpenHermes 2.5: An Open Dataset of Synthetic Data for Generalist LLM Assistants},
author = {Teknium},
year = {2023},
publisher = {HuggingFace},
url = {https://huggingface.co/datasets/teknium/OpenHermes-2.5}
}
```
| teknium/OpenHermes-2.5 | [
"language:eng",
"synthetic",
"GPT-4",
"Distillation",
"Compilation",
"region:us"
]
| 2023-11-12T16:44:26+00:00 | {"language": ["eng"], "pretty_name": "OpenHermes 2.5", "tags": ["synthetic", "GPT-4", "Distillation", "Compilation"]} | 2024-02-05T02:41:31+00:00 | []
| [
"eng"
]
| TAGS
#language-English #synthetic #GPT-4 #Distillation #Compilation #region-us
|
!image/png
# Dataset Card for Dataset Name
This is the dataset that made OpenHermes 2.5 and Nous Hermes 2 series of models.
Support me on GitHub sponsors <3 : URL
## Dataset Details
### Dataset Description
The Open Hermes 2/2.5 and Nous Hermes 2 models have made significant advancements in SOTA LLMs over recent months, and are underpinned by this exact compilation and curation of many open-source datasets and custom-created synthetic datasets.
The Open Hermes 2.5 dataset is a continuation of the Open Hermes 1 dataset, at a much larger scale: a much more diverse and much higher quality compilation, reaching 1M primarily synthetically generated instruction and chat samples.
## Lilac Integration
This dataset has been pushed to Lilac's (a data curation and exploration platform) live HuggingFace space, which hosts many popular open-source datasets for exploration and curation and supports text-embedding searches and clustering of those datasets.
Check it out here: URL
## Dataset Sources
### Airoboros 2.2
By Jon Durbin: URL
### CamelAI Domain Expert Datasets (Physics, Math, Chemistry & Biology)
By CamelAI: URL
### ChatBot Arena (GPT-4 Only)
By LMSys: URL
### Collective Cognition (09-11-2023)
By Teknium: URL
### CoT Alpaca GPT4
I have lost the source page for this dataset, sorry
### Evol Instruct 70K && 140K
By WizardLM:
70K: URL
140k: URL
### Glaive Code Assistant
By Sahil & Glaive: URL
### GPT4-LLM
By Baolin Peng*, Chunyuan Li*, Pengcheng He*, Michel Galley, Jianfeng Gao
URL
### GPTeacher
By Teknium & Kuruminha: URL
### Medical Tasks
By CogStack: URL
### MetaMath 40k
By MetaMath: URL
### SlimOrca 550K
By Wing Lian and Guan Wang and Bleys Goodson and Eugene Pentland and Austin Cook and Chanvichet Vong and Teknium:
URL
### Platypus
By Garage-bAInd: URL
### ShareGPT (GPT4-Only)
By LMSys: URL
### Unnatural Instructions GPT4
By Baolin Peng*, Chunyuan Li*, Pengcheng He*, Michel Galley, Jianfeng Gao:
URL
## Dataset Structure
The dataset follows a sharegpt structure. This means it is a list of dictionaries, with each dictionary containing a new list of dicts called conversations. Each turn in a conversation has two dictionaries, a "from" field, which denotes the role of that turn, and a "value" field which contains the actual text.
Here is an example of an entry:
Some examples, like the one above, contain the metadata that came with the dataset, such as "category" being "orca", and many contain the source dataset that it was curated from.
| [
"# Dataset Card for Dataset Name\n\nThis is the dataset that made OpenHermes 2.5 and Nous Hermes 2 series of models.\n\nSupport me on GitHub sponsors <3 : URL",
"## Dataset Details",
"### Dataset Description\n\nThe Open Hermes 2/2.5 and Nous Hermes 2 models have made significant advancements of SOTA LLM's over recent months, and are underpinned by this exact compilation and curation of many open source datasets and custom created synthetic datasets.\n\nThe Open Hermes 2.5 dataset is a continuation of the Open Hermes 1 dataset, at a much larger scale, much more diverse, and much higher quality compilation, reaching 1M, primarily synthetically generated instruction and chat samples.",
"## Lilac Integration\n\nThis dataset has been pushed to Lilac's (a data curation and exploration platform) live HuggingFace spaces, that hosts many popular OS Datasets for exploration and curation, as well as does Text Embedding searches and Clustering of those datasets\n\nCheck out that out here: URL",
"## Dataset Sources",
"### Airoboros 2.2\nBy Jon Durbin: URL",
"### CamelAI Domain Expert Datasets (Physics, Math, Chemistry & Biology)\nBy CamelAI: URL",
"### ChatBot Arena (GPT-4 Only)\nBy LMSys: URL",
"### Collective Cognition (09-11-2023)\nBy Teknium: URL",
"### CoT Alpaca GPT4\nI have lost the source page for this dataset, sorry",
"### Evol Instruct 70K && 140K\nBy WizardLM: \n70K: URL\n140k: URL",
"### Glaive Code Assistant\nBy Sahil & Glaive: URL",
"### GPT4-LLM\nBy Baolin Peng*, Chunyuan Li*, Pengcheng He*, Michel Galley, Jianfeng Gao\nURL",
"### GPTeacher\nBy Teknium & Kuruminha: URL",
"### Medical Tasks \nBy CogStack: URL",
"### MetaMath 40k\nBy MetaMath: URL",
"### SlimOrca 550K\nBy Wing Lian and Guan Wang and Bleys Goodson and Eugene Pentland and Austin Cook and Chanvichet Vong and Teknium:\nURL",
"### Platypus\nBy Garage-bAInd: URL",
"### ShareGPT (GPT4-Only)\nBy LMSys: URL",
"### Unnatural Instructions GPT4\nBy Baolin Peng*, Chunyuan Li*, Pengcheng He*, Michel Galley, Jianfeng Gao:\nURL",
"## Dataset Structure\n\nThe dataset follows a sharegpt structure. This means it is a list of dictionaries, with each dictionary containing a new list of dicts called conversations. Each turn in a conversation has two dictionaries, a \"from\" field, which denotes the role of that turn, and a \"value\" field which contains the actual text.\n\nHere is an example of an entry:\n\n\n\nSome examples, like the one above, contain the metadata that came with the dataset, such as \"category\" being \"orca\", and many contain the source dataset that it was curated from."
]
| [
"TAGS\n#language-English #synthetic #GPT-4 #Distillation #Compilation #region-us \n",
"# Dataset Card for Dataset Name\n\nThis is the dataset that made OpenHermes 2.5 and Nous Hermes 2 series of models.\n\nSupport me on GitHub sponsors <3 : URL",
"## Dataset Details",
"### Dataset Description\n\nThe Open Hermes 2/2.5 and Nous Hermes 2 models have made significant advancements of SOTA LLM's over recent months, and are underpinned by this exact compilation and curation of many open source datasets and custom created synthetic datasets.\n\nThe Open Hermes 2.5 dataset is a continuation of the Open Hermes 1 dataset, at a much larger scale, much more diverse, and much higher quality compilation, reaching 1M, primarily synthetically generated instruction and chat samples.",
"## Lilac Integration\n\nThis dataset has been pushed to Lilac's (a data curation and exploration platform) live HuggingFace spaces, that hosts many popular OS Datasets for exploration and curation, as well as does Text Embedding searches and Clustering of those datasets\n\nCheck out that out here: URL",
"## Dataset Sources",
"### Airoboros 2.2\nBy Jon Durbin: URL",
"### CamelAI Domain Expert Datasets (Physics, Math, Chemistry & Biology)\nBy CamelAI: URL",
"### ChatBot Arena (GPT-4 Only)\nBy LMSys: URL",
"### Collective Cognition (09-11-2023)\nBy Teknium: URL",
"### CoT Alpaca GPT4\nI have lost the source page for this dataset, sorry",
"### Evol Instruct 70K && 140K\nBy WizardLM: \n70K: URL\n140k: URL",
"### Glaive Code Assistant\nBy Sahil & Glaive: URL",
"### GPT4-LLM\nBy Baolin Peng*, Chunyuan Li*, Pengcheng He*, Michel Galley, Jianfeng Gao\nURL",
"### GPTeacher\nBy Teknium & Kuruminha: URL",
"### Medical Tasks \nBy CogStack: URL",
"### MetaMath 40k\nBy MetaMath: URL",
"### SlimOrca 550K\nBy Wing Lian and Guan Wang and Bleys Goodson and Eugene Pentland and Austin Cook and Chanvichet Vong and Teknium:\nURL",
"### Platypus\nBy Garage-bAInd: URL",
"### ShareGPT (GPT4-Only)\nBy LMSys: URL",
"### Unnatural Instructions GPT4\nBy Baolin Peng*, Chunyuan Li*, Pengcheng He*, Michel Galley, Jianfeng Gao:\nURL",
"## Dataset Structure\n\nThe dataset follows a sharegpt structure. This means it is a list of dictionaries, with each dictionary containing a new list of dicts called conversations. Each turn in a conversation has two dictionaries, a \"from\" field, which denotes the role of that turn, and a \"value\" field which contains the actual text.\n\nHere is an example of an entry:\n\n\n\nSome examples, like the one above, contain the metadata that came with the dataset, such as \"category\" being \"orca\", and many contain the source dataset that it was curated from."
]
| [
26,
39,
4,
120,
76,
5,
12,
27,
17,
17,
21,
24,
16,
35,
15,
12,
11,
39,
13,
18,
39,
143
]
| [
"passage: TAGS\n#language-English #synthetic #GPT-4 #Distillation #Compilation #region-us \n# Dataset Card for Dataset Name\n\nThis is the dataset that made OpenHermes 2.5 and Nous Hermes 2 series of models.\n\nSupport me on GitHub sponsors <3 : URL## Dataset Details### Dataset Description\n\nThe Open Hermes 2/2.5 and Nous Hermes 2 models have made significant advancements of SOTA LLM's over recent months, and are underpinned by this exact compilation and curation of many open source datasets and custom created synthetic datasets.\n\nThe Open Hermes 2.5 dataset is a continuation of the Open Hermes 1 dataset, at a much larger scale, much more diverse, and much higher quality compilation, reaching 1M, primarily synthetically generated instruction and chat samples.## Lilac Integration\n\nThis dataset has been pushed to Lilac's (a data curation and exploration platform) live HuggingFace spaces, that hosts many popular OS Datasets for exploration and curation, as well as does Text Embedding searches and Clustering of those datasets\n\nCheck out that out here: URL## Dataset Sources### Airoboros 2.2\nBy Jon Durbin: URL### CamelAI Domain Expert Datasets (Physics, Math, Chemistry & Biology)\nBy CamelAI: URL### ChatBot Arena (GPT-4 Only)\nBy LMSys: URL### Collective Cognition (09-11-2023)\nBy Teknium: URL### CoT Alpaca GPT4\nI have lost the source page for this dataset, sorry### Evol Instruct 70K && 140K\nBy WizardLM: \n70K: URL\n140k: URL### Glaive Code Assistant\nBy Sahil & Glaive: URL### GPT4-LLM\nBy Baolin Peng*, Chunyuan Li*, Pengcheng He*, Michel Galley, Jianfeng Gao\nURL### GPTeacher\nBy Teknium & Kuruminha: URL### Medical Tasks \nBy CogStack: URL### MetaMath 40k\nBy MetaMath: URL"
]
|
6fcb205927d132cc85187b5e42a59b631051f3f5 | # Dataset Card for "processed-bestofredditorupdates"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | derek-thomas/processed-bestofredditorupdates | [
"region:us"
]
| 2023-11-12T17:07:02+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "dataset_info": {"features": [{"name": "date_utc", "dtype": "timestamp[ns]"}, {"name": "title", "dtype": "string"}, {"name": "flair", "dtype": "string"}, {"name": "content", "dtype": "string"}, {"name": "poster", "dtype": "string"}, {"name": "permalink", "dtype": "string"}, {"name": "id", "dtype": "string"}, {"name": "content_length", "dtype": "int64"}, {"name": "score", "dtype": "int64"}, {"name": "embedding", "sequence": "float64"}], "splits": [{"name": "train", "num_bytes": 122231779, "num_examples": 9991}], "download_size": 48802673, "dataset_size": 122231779}} | 2023-11-12T17:07:54+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "processed-bestofredditorupdates"
More Information needed | [
"# Dataset Card for \"processed-bestofredditorupdates\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"processed-bestofredditorupdates\"\n\nMore Information needed"
]
| [
6,
20
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"processed-bestofredditorupdates\"\n\nMore Information needed"
]
|
7c5b8649179de1c52fdd8618de20968936a04152 | # Dataset Card for "eunews-eng"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | gizemgg/eunews-eng | [
"region:us"
]
| 2023-11-12T17:30:29+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "text", "dtype": "string"}, {"name": "__index_level_0__", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 1741935277, "num_examples": 589938}, {"name": "test", "num_bytes": 438103409, "num_examples": 147484}], "download_size": 827642652, "dataset_size": 2180038686}} | 2023-11-12T17:33:54+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "eunews-eng"
More Information needed | [
"# Dataset Card for \"eunews-eng\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"eunews-eng\"\n\nMore Information needed"
]
| [
6,
14
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"eunews-eng\"\n\nMore Information needed"
]
|
c33f26e5f279c98a99c3d949cbb2ddd668dd89d8 | # Dataset Card for "LLM-TTA-Cached-Rewrites"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | Kyle1668/LLM-TTA-Cached-Rewrites | [
"region:us"
]
| 2023-11-12T17:49:43+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "boss_sentiment_stabilityai_StableBeluga_13B_tempequals0dot0", "path": "data/boss_sentiment_stabilityai_StableBeluga_13B_tempequals0dot0-*"}, {"split": "ag_news_twitter_aug_substitute", "path": "data/ag_news_twitter_aug_substitute-*"}, {"split": "boss_sentiment_aug_insert", "path": "data/boss_sentiment_aug_insert-*"}, {"split": "ag_news_twitter_aug_insert", "path": "data/ag_news_twitter_aug_insert-*"}, {"split": "ag_news_twitter_aug_back_translate", "path": "data/ag_news_twitter_aug_back_translate-*"}, {"split": "boss_toxicity_stabilityai_StableBeluga_7b_tempequals0dot0", "path": "data/boss_toxicity_stabilityai_StableBeluga_7b_tempequals0dot0-*"}, {"split": "ag_news_twitter_stabilityai_StableBeluga_7b_tempequals0dot0", "path": "data/ag_news_twitter_stabilityai_StableBeluga_7b_tempequals0dot0-*"}, {"split": "boss_sentiment_aug_back_translate", "path": "data/boss_sentiment_aug_back_translate-*"}, {"split": "boss_toxicity_aug_substitute", "path": "data/boss_toxicity_aug_substitute-*"}, {"split": "boss_toxicity_aug_insert", "path": "data/boss_toxicity_aug_insert-*"}, {"split": "boss_sentiment_aug_substitute", "path": "data/boss_sentiment_aug_substitute-*"}, {"split": "boss_toxicity_aug_back_translate", "path": "data/boss_toxicity_aug_back_translate-*"}, {"split": "boss_sentiment_stabilityai_StableBeluga_7b_tempequals0dot0", "path": "data/boss_sentiment_stabilityai_StableBeluga_7b_tempequals0dot0-*"}]}], "dataset_info": {"features": [{"name": "prompt_hash", "dtype": "string"}, {"name": "prompt", "dtype": "string"}, {"name": "rewrites", "dtype": "string"}], "splits": [{"name": "boss_sentiment_stabilityai_StableBeluga_13B_tempequals0dot0", "num_bytes": 3703118, "num_examples": 2132}, {"name": "ag_news_twitter_aug_substitute", "num_bytes": 22069756, "num_examples": 15200}, {"name": "boss_sentiment_aug_insert", "num_bytes": 101392185, "num_examples": 61580}, {"name": "ag_news_twitter_aug_insert", "num_bytes": 25877025, "num_examples": 15200}, {"name": "ag_news_twitter_aug_back_translate", "num_bytes": 21078091, "num_examples": 15200}, {"name": "boss_toxicity_stabilityai_StableBeluga_7b_tempequals0dot0", "num_bytes": 659072364, "num_examples": 240078}, {"name": "ag_news_twitter_stabilityai_StableBeluga_7b_tempequals0dot0", "num_bytes": 82978276, "num_examples": 30400}, {"name": "boss_sentiment_aug_back_translate", "num_bytes": 75819709, "num_examples": 61580}, {"name": "boss_toxicity_aug_substitute", "num_bytes": 200434523, "num_examples": 120032}, {"name": "boss_toxicity_aug_insert", "num_bytes": 222397157, "num_examples": 120032}, {"name": "boss_sentiment_aug_substitute", "num_bytes": 91318472, "num_examples": 61580}, {"name": "boss_toxicity_aug_back_translate", "num_bytes": 186461827, "num_examples": 120032}, {"name": "boss_sentiment_stabilityai_StableBeluga_7b_tempequals0dot0", "num_bytes": 291064861, "num_examples": 123133}], "download_size": 714228880, "dataset_size": 1983667364}} | 2024-02-07T18:52:47+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "LLM-TTA-Cached-Rewrites"
More Information needed | [
"# Dataset Card for \"LLM-TTA-Cached-Rewrites\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"LLM-TTA-Cached-Rewrites\"\n\nMore Information needed"
]
| [
6,
21
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"LLM-TTA-Cached-Rewrites\"\n\nMore Information needed"
]
|
93b272db9faefbf1c4c230515c08f0e8885f729d |
# Dataset Card for Evaluation run of revolutionarybukhari/Llama-2-7b-chat-finetune-AUTOMATE
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/revolutionarybukhari/Llama-2-7b-chat-finetune-AUTOMATE
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]
### Dataset Summary
Dataset automatically created during the evaluation run of model [revolutionarybukhari/Llama-2-7b-chat-finetune-AUTOMATE](https://huggingface.co/revolutionarybukhari/Llama-2-7b-chat-finetune-AUTOMATE) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_revolutionarybukhari__Llama-2-7b-chat-finetune-AUTOMATE_public",
"harness_winogrande_5",
split="train")
```
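To list the other task configurations, a small sketch (assuming the `get_dataset_config_names` utility from the same `datasets` library; configuration names follow the `harness_<task>_<n_shots>` pattern used above):

```python
from datasets import get_dataset_config_names

configs = get_dataset_config_names(
    "open-llm-leaderboard/details_revolutionarybukhari__Llama-2-7b-chat-finetune-AUTOMATE_public"
)
# Expect the 64 task configurations plus the aggregated "results" configuration.
print(len(configs), "configurations, e.g.:", configs[:5])
```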
## Latest results
These are the [latest results from run 2023-11-12T17:51:35.598056](https://huggingface.co/datasets/open-llm-leaderboard/details_revolutionarybukhari__Llama-2-7b-chat-finetune-AUTOMATE_public/blob/main/results_2023-11-12T17-51-35.598056.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.48664997345720373,
"acc_stderr": 0.03427289794847252,
"acc_norm": 0.4932299888431757,
"acc_norm_stderr": 0.03508094254293674,
"mc1": 0.2876376988984088,
"mc1_stderr": 0.015846315101394812,
"mc2": 0.44729919889234016,
"mc2_stderr": 0.015286276115878357,
"em": 0.010906040268456376,
"em_stderr": 0.0010636334198498001,
"f1": 0.06768770973154396,
"f1_stderr": 0.0017077194500790263
},
"harness|arc:challenge|25": {
"acc": 0.4906143344709898,
"acc_stderr": 0.014608816322065,
"acc_norm": 0.5307167235494881,
"acc_norm_stderr": 0.014583792546304037
},
"harness|hellaswag|10": {
"acc": 0.5622385978888668,
"acc_stderr": 0.004950973231188739,
"acc_norm": 0.7559251145190201,
"acc_norm_stderr": 0.004286594977390899
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.29,
"acc_stderr": 0.04560480215720684,
"acc_norm": 0.29,
"acc_norm_stderr": 0.04560480215720684
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.42962962962962964,
"acc_stderr": 0.04276349494376599,
"acc_norm": 0.42962962962962964,
"acc_norm_stderr": 0.04276349494376599
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.46710526315789475,
"acc_stderr": 0.040601270352363966,
"acc_norm": 0.46710526315789475,
"acc_norm_stderr": 0.040601270352363966
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.53,
"acc_stderr": 0.05016135580465919,
"acc_norm": 0.53,
"acc_norm_stderr": 0.05016135580465919
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.539622641509434,
"acc_stderr": 0.030676096599389184,
"acc_norm": 0.539622641509434,
"acc_norm_stderr": 0.030676096599389184
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.5347222222222222,
"acc_stderr": 0.04171115858181618,
"acc_norm": 0.5347222222222222,
"acc_norm_stderr": 0.04171115858181618
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542127,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542127
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.4,
"acc_stderr": 0.049236596391733084,
"acc_norm": 0.4,
"acc_norm_stderr": 0.049236596391733084
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.35,
"acc_stderr": 0.047937248544110196,
"acc_norm": 0.35,
"acc_norm_stderr": 0.047937248544110196
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.4046242774566474,
"acc_stderr": 0.03742461193887248,
"acc_norm": 0.4046242774566474,
"acc_norm_stderr": 0.03742461193887248
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.22549019607843138,
"acc_stderr": 0.041583075330832865,
"acc_norm": 0.22549019607843138,
"acc_norm_stderr": 0.041583075330832865
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.56,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.56,
"acc_norm_stderr": 0.04988876515698589
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.42127659574468085,
"acc_stderr": 0.03227834510146268,
"acc_norm": 0.42127659574468085,
"acc_norm_stderr": 0.03227834510146268
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.37719298245614036,
"acc_stderr": 0.045595221419582166,
"acc_norm": 0.37719298245614036,
"acc_norm_stderr": 0.045595221419582166
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.503448275862069,
"acc_stderr": 0.041665675771015785,
"acc_norm": 0.503448275862069,
"acc_norm_stderr": 0.041665675771015785
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.2962962962962963,
"acc_stderr": 0.023517294335963286,
"acc_norm": 0.2962962962962963,
"acc_norm_stderr": 0.023517294335963286
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.24603174603174602,
"acc_stderr": 0.03852273364924314,
"acc_norm": 0.24603174603174602,
"acc_norm_stderr": 0.03852273364924314
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.41,
"acc_stderr": 0.049431107042371025,
"acc_norm": 0.41,
"acc_norm_stderr": 0.049431107042371025
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.532258064516129,
"acc_stderr": 0.028384747788813332,
"acc_norm": 0.532258064516129,
"acc_norm_stderr": 0.028384747788813332
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.35960591133004927,
"acc_stderr": 0.03376458246509566,
"acc_norm": 0.35960591133004927,
"acc_norm_stderr": 0.03376458246509566
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.42,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.42,
"acc_norm_stderr": 0.049604496374885836
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.5696969696969697,
"acc_stderr": 0.03866225962879077,
"acc_norm": 0.5696969696969697,
"acc_norm_stderr": 0.03866225962879077
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.5909090909090909,
"acc_stderr": 0.03502975799413007,
"acc_norm": 0.5909090909090909,
"acc_norm_stderr": 0.03502975799413007
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.7098445595854922,
"acc_stderr": 0.03275264467791516,
"acc_norm": 0.7098445595854922,
"acc_norm_stderr": 0.03275264467791516
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.4230769230769231,
"acc_stderr": 0.02504919787604234,
"acc_norm": 0.4230769230769231,
"acc_norm_stderr": 0.02504919787604234
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.26666666666666666,
"acc_stderr": 0.02696242432507383,
"acc_norm": 0.26666666666666666,
"acc_norm_stderr": 0.02696242432507383
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.4369747899159664,
"acc_stderr": 0.03221943636566196,
"acc_norm": 0.4369747899159664,
"acc_norm_stderr": 0.03221943636566196
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.304635761589404,
"acc_stderr": 0.03757949922943343,
"acc_norm": 0.304635761589404,
"acc_norm_stderr": 0.03757949922943343
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.671559633027523,
"acc_stderr": 0.02013590279729841,
"acc_norm": 0.671559633027523,
"acc_norm_stderr": 0.02013590279729841
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.3287037037037037,
"acc_stderr": 0.032036140846700596,
"acc_norm": 0.3287037037037037,
"acc_norm_stderr": 0.032036140846700596
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.6813725490196079,
"acc_stderr": 0.032702871814820796,
"acc_norm": 0.6813725490196079,
"acc_norm_stderr": 0.032702871814820796
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.6708860759493671,
"acc_stderr": 0.03058732629470237,
"acc_norm": 0.6708860759493671,
"acc_norm_stderr": 0.03058732629470237
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.5874439461883408,
"acc_stderr": 0.03304062175449297,
"acc_norm": 0.5874439461883408,
"acc_norm_stderr": 0.03304062175449297
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.5877862595419847,
"acc_stderr": 0.04317171194870255,
"acc_norm": 0.5877862595419847,
"acc_norm_stderr": 0.04317171194870255
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.6446280991735537,
"acc_stderr": 0.0436923632657398,
"acc_norm": 0.6446280991735537,
"acc_norm_stderr": 0.0436923632657398
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.6111111111111112,
"acc_stderr": 0.04712821257426769,
"acc_norm": 0.6111111111111112,
"acc_norm_stderr": 0.04712821257426769
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.5521472392638037,
"acc_stderr": 0.03906947479456606,
"acc_norm": 0.5521472392638037,
"acc_norm_stderr": 0.03906947479456606
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.3482142857142857,
"acc_stderr": 0.04521829902833586,
"acc_norm": 0.3482142857142857,
"acc_norm_stderr": 0.04521829902833586
},
"harness|hendrycksTest-management|5": {
"acc": 0.6893203883495146,
"acc_stderr": 0.045821241601615506,
"acc_norm": 0.6893203883495146,
"acc_norm_stderr": 0.045821241601615506
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.7222222222222222,
"acc_stderr": 0.02934311479809446,
"acc_norm": 0.7222222222222222,
"acc_norm_stderr": 0.02934311479809446
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.53,
"acc_stderr": 0.05016135580465919,
"acc_norm": 0.53,
"acc_norm_stderr": 0.05016135580465919
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.6704980842911877,
"acc_stderr": 0.01680832226174046,
"acc_norm": 0.6704980842911877,
"acc_norm_stderr": 0.01680832226174046
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.5202312138728323,
"acc_stderr": 0.026897049996382875,
"acc_norm": 0.5202312138728323,
"acc_norm_stderr": 0.026897049996382875
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.2223463687150838,
"acc_stderr": 0.013907189208156881,
"acc_norm": 0.2223463687150838,
"acc_norm_stderr": 0.013907189208156881
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.5098039215686274,
"acc_stderr": 0.028624412550167958,
"acc_norm": 0.5098039215686274,
"acc_norm_stderr": 0.028624412550167958
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.572347266881029,
"acc_stderr": 0.02809924077580956,
"acc_norm": 0.572347266881029,
"acc_norm_stderr": 0.02809924077580956
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.5740740740740741,
"acc_stderr": 0.027513747284379428,
"acc_norm": 0.5740740740740741,
"acc_norm_stderr": 0.027513747284379428
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.3723404255319149,
"acc_stderr": 0.02883892147125146,
"acc_norm": 0.3723404255319149,
"acc_norm_stderr": 0.02883892147125146
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.34615384615384615,
"acc_stderr": 0.012150699768228556,
"acc_norm": 0.34615384615384615,
"acc_norm_stderr": 0.012150699768228556
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.4742647058823529,
"acc_stderr": 0.03033257809455504,
"acc_norm": 0.4742647058823529,
"acc_norm_stderr": 0.03033257809455504
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.4869281045751634,
"acc_stderr": 0.020220920829626916,
"acc_norm": 0.4869281045751634,
"acc_norm_stderr": 0.020220920829626916
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.5272727272727272,
"acc_stderr": 0.04782001791380061,
"acc_norm": 0.5272727272727272,
"acc_norm_stderr": 0.04782001791380061
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.5306122448979592,
"acc_stderr": 0.031949171367580624,
"acc_norm": 0.5306122448979592,
"acc_norm_stderr": 0.031949171367580624
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.6517412935323383,
"acc_stderr": 0.033687874661154596,
"acc_norm": 0.6517412935323383,
"acc_norm_stderr": 0.033687874661154596
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.71,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.71,
"acc_norm_stderr": 0.045604802157206845
},
"harness|hendrycksTest-virology|5": {
"acc": 0.41566265060240964,
"acc_stderr": 0.03836722176598052,
"acc_norm": 0.41566265060240964,
"acc_norm_stderr": 0.03836722176598052
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.7251461988304093,
"acc_stderr": 0.034240429246915824,
"acc_norm": 0.7251461988304093,
"acc_norm_stderr": 0.034240429246915824
},
"harness|truthfulqa:mc|0": {
"mc1": 0.2876376988984088,
"mc1_stderr": 0.015846315101394812,
"mc2": 0.44729919889234016,
"mc2_stderr": 0.015286276115878357
},
"harness|winogrande|5": {
"acc": 0.7324388318863457,
"acc_stderr": 0.01244171845689301
},
"harness|drop|3": {
"em": 0.010906040268456376,
"em_stderr": 0.0010636334198498001,
"f1": 0.06768770973154396,
"f1_stderr": 0.0017077194500790263
},
"harness|gsm8k|5": {
"acc": 0.08642911296436695,
"acc_stderr": 0.007740044337103787
}
}
```
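As a sanity check, the timestamped results file linked above can be fetched and summarized directly. A minimal sketch, assuming the `huggingface_hub` client and the exact filename from the link (newer runs will use a different timestamp, and the file may nest the metrics under a top-level `results` key):

```python
import json

from huggingface_hub import hf_hub_download

# Repo id and filename are taken from the "Latest results" link above.
REPO_ID = "open-llm-leaderboard/details_revolutionarybukhari__Llama-2-7b-chat-finetune-AUTOMATE_public"
FILENAME = "results_2023-11-12T17-51-35.598056.json"

path = hf_hub_download(repo_id=REPO_ID, filename=FILENAME, repo_type="dataset")
with open(path) as f:
    data = json.load(f)

# Depending on the harness version, metrics sit at the top level or under "results".
results = data.get("results", data)

print("aggregate:", results["all"])
for task, metrics in sorted(results.items()):
    if task != "all" and isinstance(metrics, dict) and "acc" in metrics:
        print(f"{task}: acc={metrics['acc']:.4f}")
```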
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] | open-llm-leaderboard/details_revolutionarybukhari__Llama-2-7b-chat-finetune-AUTOMATE | [
"region:us"
]
| 2023-11-12T17:54:40+00:00 | {"pretty_name": "Evaluation run of revolutionarybukhari/Llama-2-7b-chat-finetune-AUTOMATE", "dataset_summary": "Dataset automatically created during the evaluation run of model [revolutionarybukhari/Llama-2-7b-chat-finetune-AUTOMATE](https://huggingface.co/revolutionarybukhari/Llama-2-7b-chat-finetune-AUTOMATE) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_revolutionarybukhari__Llama-2-7b-chat-finetune-AUTOMATE_public\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-11-12T17:51:35.598056](https://huggingface.co/datasets/open-llm-leaderboard/details_revolutionarybukhari__Llama-2-7b-chat-finetune-AUTOMATE_public/blob/main/results_2023-11-12T17-51-35.598056.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.48664997345720373,\n \"acc_stderr\": 0.03427289794847252,\n \"acc_norm\": 0.4932299888431757,\n \"acc_norm_stderr\": 0.03508094254293674,\n \"mc1\": 0.2876376988984088,\n \"mc1_stderr\": 0.015846315101394812,\n \"mc2\": 0.44729919889234016,\n \"mc2_stderr\": 0.015286276115878357,\n \"em\": 0.010906040268456376,\n \"em_stderr\": 0.0010636334198498001,\n \"f1\": 0.06768770973154396,\n \"f1_stderr\": 0.0017077194500790263\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.4906143344709898,\n \"acc_stderr\": 0.014608816322065,\n \"acc_norm\": 0.5307167235494881,\n \"acc_norm_stderr\": 0.014583792546304037\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5622385978888668,\n \"acc_stderr\": 0.004950973231188739,\n \"acc_norm\": 0.7559251145190201,\n \"acc_norm_stderr\": 0.004286594977390899\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.42962962962962964,\n \"acc_stderr\": 0.04276349494376599,\n \"acc_norm\": 0.42962962962962964,\n \"acc_norm_stderr\": 0.04276349494376599\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.46710526315789475,\n \"acc_stderr\": 0.040601270352363966,\n \"acc_norm\": 0.46710526315789475,\n \"acc_norm_stderr\": 0.040601270352363966\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.539622641509434,\n \"acc_stderr\": 0.030676096599389184,\n \"acc_norm\": 0.539622641509434,\n \"acc_norm_stderr\": 0.030676096599389184\n },\n 
\"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5347222222222222,\n \"acc_stderr\": 0.04171115858181618,\n \"acc_norm\": 0.5347222222222222,\n \"acc_norm_stderr\": 0.04171115858181618\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.4046242774566474,\n \"acc_stderr\": 0.03742461193887248,\n \"acc_norm\": 0.4046242774566474,\n \"acc_norm_stderr\": 0.03742461193887248\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.041583075330832865,\n \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.041583075330832865\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.42127659574468085,\n \"acc_stderr\": 0.03227834510146268,\n \"acc_norm\": 0.42127659574468085,\n \"acc_norm_stderr\": 0.03227834510146268\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.37719298245614036,\n \"acc_stderr\": 0.045595221419582166,\n \"acc_norm\": 0.37719298245614036,\n \"acc_norm_stderr\": 0.045595221419582166\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.503448275862069,\n \"acc_stderr\": 0.041665675771015785,\n \"acc_norm\": 0.503448275862069,\n \"acc_norm_stderr\": 0.041665675771015785\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2962962962962963,\n \"acc_stderr\": 0.023517294335963286,\n \"acc_norm\": 0.2962962962962963,\n \"acc_norm_stderr\": 0.023517294335963286\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.24603174603174602,\n \"acc_stderr\": 0.03852273364924314,\n \"acc_norm\": 0.24603174603174602,\n \"acc_norm_stderr\": 0.03852273364924314\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.532258064516129,\n \"acc_stderr\": 0.028384747788813332,\n \"acc_norm\": 0.532258064516129,\n \"acc_norm_stderr\": 0.028384747788813332\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.35960591133004927,\n \"acc_stderr\": 0.03376458246509566,\n \"acc_norm\": 0.35960591133004927,\n \"acc_norm_stderr\": 0.03376458246509566\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.5696969696969697,\n \"acc_stderr\": 0.03866225962879077,\n \"acc_norm\": 0.5696969696969697,\n \"acc_norm_stderr\": 0.03866225962879077\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.5909090909090909,\n \"acc_stderr\": 0.03502975799413007,\n \"acc_norm\": 0.5909090909090909,\n \"acc_norm_stderr\": 
0.03502975799413007\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.7098445595854922,\n \"acc_stderr\": 0.03275264467791516,\n \"acc_norm\": 0.7098445595854922,\n \"acc_norm_stderr\": 0.03275264467791516\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.4230769230769231,\n \"acc_stderr\": 0.02504919787604234,\n \"acc_norm\": 0.4230769230769231,\n \"acc_norm_stderr\": 0.02504919787604234\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.26666666666666666,\n \"acc_stderr\": 0.02696242432507383,\n \"acc_norm\": 0.26666666666666666,\n \"acc_norm_stderr\": 0.02696242432507383\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.4369747899159664,\n \"acc_stderr\": 0.03221943636566196,\n \"acc_norm\": 0.4369747899159664,\n \"acc_norm_stderr\": 0.03221943636566196\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.304635761589404,\n \"acc_stderr\": 0.03757949922943343,\n \"acc_norm\": 0.304635761589404,\n \"acc_norm_stderr\": 0.03757949922943343\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.671559633027523,\n \"acc_stderr\": 0.02013590279729841,\n \"acc_norm\": 0.671559633027523,\n \"acc_norm_stderr\": 0.02013590279729841\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.3287037037037037,\n \"acc_stderr\": 0.032036140846700596,\n \"acc_norm\": 0.3287037037037037,\n \"acc_norm_stderr\": 0.032036140846700596\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.6813725490196079,\n \"acc_stderr\": 0.032702871814820796,\n \"acc_norm\": 0.6813725490196079,\n \"acc_norm_stderr\": 0.032702871814820796\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.6708860759493671,\n \"acc_stderr\": 0.03058732629470237,\n \"acc_norm\": 0.6708860759493671,\n \"acc_norm_stderr\": 0.03058732629470237\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.5874439461883408,\n \"acc_stderr\": 0.03304062175449297,\n \"acc_norm\": 0.5874439461883408,\n \"acc_norm_stderr\": 0.03304062175449297\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.5877862595419847,\n \"acc_stderr\": 0.04317171194870255,\n \"acc_norm\": 0.5877862595419847,\n \"acc_norm_stderr\": 0.04317171194870255\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.6446280991735537,\n \"acc_stderr\": 0.0436923632657398,\n \"acc_norm\": 0.6446280991735537,\n \"acc_norm_stderr\": 0.0436923632657398\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6111111111111112,\n \"acc_stderr\": 0.04712821257426769,\n \"acc_norm\": 0.6111111111111112,\n \"acc_norm_stderr\": 0.04712821257426769\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.5521472392638037,\n \"acc_stderr\": 0.03906947479456606,\n \"acc_norm\": 0.5521472392638037,\n \"acc_norm_stderr\": 0.03906947479456606\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3482142857142857,\n \"acc_stderr\": 0.04521829902833586,\n \"acc_norm\": 0.3482142857142857,\n \"acc_norm_stderr\": 0.04521829902833586\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.6893203883495146,\n \"acc_stderr\": 0.045821241601615506,\n \"acc_norm\": 0.6893203883495146,\n \"acc_norm_stderr\": 0.045821241601615506\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.02934311479809446,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.02934311479809446\n },\n 
\"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.6704980842911877,\n \"acc_stderr\": 0.01680832226174046,\n \"acc_norm\": 0.6704980842911877,\n \"acc_norm_stderr\": 0.01680832226174046\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.5202312138728323,\n \"acc_stderr\": 0.026897049996382875,\n \"acc_norm\": 0.5202312138728323,\n \"acc_norm_stderr\": 0.026897049996382875\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2223463687150838,\n \"acc_stderr\": 0.013907189208156881,\n \"acc_norm\": 0.2223463687150838,\n \"acc_norm_stderr\": 0.013907189208156881\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.5098039215686274,\n \"acc_stderr\": 0.028624412550167958,\n \"acc_norm\": 0.5098039215686274,\n \"acc_norm_stderr\": 0.028624412550167958\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.572347266881029,\n \"acc_stderr\": 0.02809924077580956,\n \"acc_norm\": 0.572347266881029,\n \"acc_norm_stderr\": 0.02809924077580956\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.5740740740740741,\n \"acc_stderr\": 0.027513747284379428,\n \"acc_norm\": 0.5740740740740741,\n \"acc_norm_stderr\": 0.027513747284379428\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.3723404255319149,\n \"acc_stderr\": 0.02883892147125146,\n \"acc_norm\": 0.3723404255319149,\n \"acc_norm_stderr\": 0.02883892147125146\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.34615384615384615,\n \"acc_stderr\": 0.012150699768228556,\n \"acc_norm\": 0.34615384615384615,\n \"acc_norm_stderr\": 0.012150699768228556\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.4742647058823529,\n \"acc_stderr\": 0.03033257809455504,\n \"acc_norm\": 0.4742647058823529,\n \"acc_norm_stderr\": 0.03033257809455504\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.4869281045751634,\n \"acc_stderr\": 0.020220920829626916,\n \"acc_norm\": 0.4869281045751634,\n \"acc_norm_stderr\": 0.020220920829626916\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5272727272727272,\n \"acc_stderr\": 0.04782001791380061,\n \"acc_norm\": 0.5272727272727272,\n \"acc_norm_stderr\": 0.04782001791380061\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.5306122448979592,\n \"acc_stderr\": 0.031949171367580624,\n \"acc_norm\": 0.5306122448979592,\n \"acc_norm_stderr\": 0.031949171367580624\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6517412935323383,\n \"acc_stderr\": 0.033687874661154596,\n \"acc_norm\": 0.6517412935323383,\n \"acc_norm_stderr\": 0.033687874661154596\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.41566265060240964,\n \"acc_stderr\": 0.03836722176598052,\n \"acc_norm\": 0.41566265060240964,\n \"acc_norm_stderr\": 0.03836722176598052\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7251461988304093,\n \"acc_stderr\": 0.034240429246915824,\n \"acc_norm\": 0.7251461988304093,\n \"acc_norm_stderr\": 0.034240429246915824\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2876376988984088,\n \"mc1_stderr\": 0.015846315101394812,\n \"mc2\": 0.44729919889234016,\n 
\"mc2_stderr\": 0.015286276115878357\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7324388318863457,\n \"acc_stderr\": 0.01244171845689301\n },\n \"harness|drop|3\": {\n \"em\": 0.010906040268456376,\n \"em_stderr\": 0.0010636334198498001,\n \"f1\": 0.06768770973154396,\n \"f1_stderr\": 0.0017077194500790263\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.08642911296436695,\n \"acc_stderr\": 0.007740044337103787\n }\n}\n```", "repo_url": "https://huggingface.co/revolutionarybukhari/Llama-2-7b-chat-finetune-AUTOMATE", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|arc:challenge|25_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|drop|3_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|gsm8k|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hellaswag|10_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-11-12T17-51-35.598056.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-management|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-virology|5_2023-11-12T17-51-35.598056.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T17-51-35.598056.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-management|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-virology|5_2023-11-12T17-51-35.598056.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": 
["**/details_harness|hendrycksTest-college_biology|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": 
["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": 
["**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-management|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|hendrycksTest-virology|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": 
["**/details_harness|hendrycksTest-world_religions|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|truthfulqa:mc|0_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["**/details_harness|winogrande|5_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-11-12T17-51-35.598056.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_11_12T17_51_35.598056", "path": ["results_2023-11-12T17-51-35.598056.parquet"]}, {"split": "latest", "path": ["results_2023-11-12T17-51-35.598056.parquet"]}]}]} | 2023-11-12T17:55:40+00:00 | []
| []
| TAGS
#region-us
|
# Dataset Card for Evaluation run of revolutionarybukhari/Llama-2-7b-chat-finetune-AUTOMATE
## Dataset Description
- Homepage:
- Repository: URL
- Paper:
- Leaderboard: URL
- Point of Contact: clementine@URL
### Dataset Summary
Dataset automatically created during the evaluation run of model revolutionarybukhari/Llama-2-7b-chat-finetune-AUTOMATE on the Open LLM Leaderboard.
The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).
To load the details from a run, you can for instance do the following:
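For example, with the `datasets` library — a minimal sketch, and note that the repository id below is an assumption based on the usual `open-llm-leaderboard/details_<org>__<model>` naming convention of these auto-generated cards, so verify it against this dataset's actual path:

```python
from datasets import load_dataset

# Assumed repo id following the leaderboard's "details_<org>__<model>"
# convention; adjust it if this dataset lives under a different path.
data = load_dataset(
    "open-llm-leaderboard/details_revolutionarybukhari__Llama-2-7b-chat-finetune-AUTOMATE",
    "harness_winogrande_5",  # one of the 64 task configurations
    split="train",           # "train" always points to the latest results
)
```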
## Latest results
These are the latest results from run 2023-11-12T17:51:35.598056 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find them in the results and in the "latest" split for each eval):
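To inspect these aggregated numbers programmatically, a sketch under the same repo-id assumption as above is:

```python
from datasets import load_dataset

# The "results" configuration stores the aggregated metrics; its
# "latest" split points at this most recent run.
results = load_dataset(
    "open-llm-leaderboard/details_revolutionarybukhari__Llama-2-7b-chat-finetune-AUTOMATE",
    "results",
    split="latest",
)
print(results[0])  # one row of aggregated scores for the run
```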
### Supported Tasks and Leaderboards
### Languages
## Dataset Structure
### Data Instances
### Data Fields
### Data Splits
## Dataset Creation
### Curation Rationale
### Source Data
#### Initial Data Collection and Normalization
#### Who are the source language producers?
### Annotations
#### Annotation process
#### Who are the annotators?
### Personal and Sensitive Information
## Considerations for Using the Data
### Social Impact of Dataset
### Discussion of Biases
### Other Known Limitations
## Additional Information
### Dataset Curators
### Licensing Information
### Contributions
|
1582a7cd081b4bd283caa67e2936504524a59086 |
# Bangumi Image Base of Gintama
This is the image base of the bangumi Gintama. We detected 225 characters and 31,678 images in total. The full dataset is [here](all.zip).
**Please note that these image bases are not guaranteed to be 100% clean; they may actually contain noise.** If you intend to manually train models using this dataset, we recommend performing the necessary preprocessing on the downloaded files to eliminate potential noisy samples (approximately 1% probability).
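If you want an automated first pass before manual review, here is a minimal preprocessing sketch in Python. It assumes the per-character `dataset.zip` archives contain plain image files; the repo id comes from this dataset page, while the unreadable-file check is only an illustrative heuristic and will not catch every noisy sample.

```python
import zipfile
from pathlib import Path

from huggingface_hub import hf_hub_download  # pip install huggingface_hub
from PIL import Image                        # pip install Pillow

# Download one character pack from this dataset repository.
zip_path = hf_hub_download(
    repo_id="BangumiBase/gintama",
    filename="0/dataset.zip",   # character 0; change per character
    repo_type="dataset",
)

out_dir = Path("gintama_char0")
with zipfile.ZipFile(zip_path) as zf:
    zf.extractall(out_dir)

# Drop files Pillow cannot parse; the remaining ~1% mislabeled-but-valid
# images still need a manual look.
for img_path in sorted(p for p in out_dir.rglob("*") if p.is_file()):
    try:
        with Image.open(img_path) as im:
            im.verify()
    except Exception:
        print(f"removing unreadable file: {img_path}")
        img_path.unlink()
```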
Here is the characters' preview:
| # | Images | Download | Preview 1 | Preview 2 | Preview 3 | Preview 4 | Preview 5 | Preview 6 | Preview 7 | Preview 8 |
|:------|---------:|:----------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|
| 0 | 2887 | [Download](0/dataset.zip) |  |  |  |  |  |  |  |  |
| 1 | 268 | [Download](1/dataset.zip) |  |  |  |  |  |  |  |  |
| 2 | 32 | [Download](2/dataset.zip) |  |  |  |  |  |  |  |  |
| 3 | 1069 | [Download](3/dataset.zip) |  |  |  |  |  |  |  |  |
| 4 | 210 | [Download](4/dataset.zip) |  |  |  |  |  |  |  |  |
| 5 | 181 | [Download](5/dataset.zip) |  |  |  |  |  |  |  |  |
| 6 | 464 | [Download](6/dataset.zip) |  |  |  |  |  |  |  |  |
| 7 | 131 | [Download](7/dataset.zip) |  |  |  |  |  |  |  |  |
| 8 | 118 | [Download](8/dataset.zip) |  |  |  |  |  |  |  |  |
| 9 | 115 | [Download](9/dataset.zip) |  |  |  |  |  |  |  |  |
| 10 | 148 | [Download](10/dataset.zip) |  |  |  |  |  |  |  |  |
| 11 | 334 | [Download](11/dataset.zip) |  |  |  |  |  |  |  |  |
| 12 | 97 | [Download](12/dataset.zip) |  |  |  |  |  |  |  |  |
| 13 | 171 | [Download](13/dataset.zip) |  |  |  |  |  |  |  |  |
| 14 | 2819 | [Download](14/dataset.zip) |  |  |  |  |  |  |  |  |
| 15 | 1531 | [Download](15/dataset.zip) |  |  |  |  |  |  |  |  |
| 16 | 445 | [Download](16/dataset.zip) |  |  |  |  |  |  |  |  |
| 17 | 876 | [Download](17/dataset.zip) |  |  |  |  |  |  |  |  |
| 18 | 329 | [Download](18/dataset.zip) |  |  |  |  |  |  |  |  |
| 19 | 60 | [Download](19/dataset.zip) |  |  |  |  |  |  |  |  |
| 20 | 145 | [Download](20/dataset.zip) |  |  |  |  |  |  |  |  |
| 21 | 213 | [Download](21/dataset.zip) |  |  |  |  |  |  |  |  |
| 22 | 727 | [Download](22/dataset.zip) |  |  |  |  |  |  |  |  |
| 23 | 284 | [Download](23/dataset.zip) |  |  |  |  |  |  |  |  |
| 24 | 93 | [Download](24/dataset.zip) |  |  |  |  |  |  |  |  |
| 25 | 72 | [Download](25/dataset.zip) |  |  |  |  |  |  |  |  |
| 26 | 186 | [Download](26/dataset.zip) |  |  |  |  |  |  |  |  |
| 27 | 76 | [Download](27/dataset.zip) |  |  |  |  |  |  |  |  |
| 28 | 62 | [Download](28/dataset.zip) |  |  |  |  |  |  |  |  |
| 29 | 55 | [Download](29/dataset.zip) |  |  |  |  |  |  |  |  |
| 30 | 294 | [Download](30/dataset.zip) |  |  |  |  |  |  |  |  |
| 31 | 45 | [Download](31/dataset.zip) |  |  |  |  |  |  |  |  |
| 32 | 67 | [Download](32/dataset.zip) |  |  |  |  |  |  |  |  |
| 33 | 42 | [Download](33/dataset.zip) |  |  |  |  |  |  |  |  |
| 34 | 119 | [Download](34/dataset.zip) |  |  |  |  |  |  |  |  |
| 35 | 49 | [Download](35/dataset.zip) |  |  |  |  |  |  |  |  |
| 36 | 52 | [Download](36/dataset.zip) |  |  |  |  |  |  |  |  |
| 37 | 45 | [Download](37/dataset.zip) |  |  |  |  |  |  |  |  |
| 38 | 77 | [Download](38/dataset.zip) |  |  |  |  |  |  |  |  |
| 39 | 37 | [Download](39/dataset.zip) |  |  |  |  |  |  |  |  |
| 40 | 57 | [Download](40/dataset.zip) |  |  |  |  |  |  |  |  |
| 41 | 51 | [Download](41/dataset.zip) |  |  |  |  |  |  |  |  |
| 42 | 80 | [Download](42/dataset.zip) |  |  |  |  |  |  |  |  |
| 43 | 128 | [Download](43/dataset.zip) |  |  |  |  |  |  |  |  |
| 44 | 106 | [Download](44/dataset.zip) |  |  |  |  |  |  |  |  |
| 45 | 42 | [Download](45/dataset.zip) |  |  |  |  |  |  |  |  |
| 46 | 72 | [Download](46/dataset.zip) |  |  |  |  |  |  |  |  |
| 47 | 22 | [Download](47/dataset.zip) |  |  |  |  |  |  |  |  |
| 48 | 57 | [Download](48/dataset.zip) |  |  |  |  |  |  |  |  |
| 49 | 43 | [Download](49/dataset.zip) |  |  |  |  |  |  |  |  |
| 50 | 73 | [Download](50/dataset.zip) |  |  |  |  |  |  |  |  |
| 51 | 67 | [Download](51/dataset.zip) |  |  |  |  |  |  |  |  |
| 52 | 43 | [Download](52/dataset.zip) |  |  |  |  |  |  |  |  |
| 53 | 690 | [Download](53/dataset.zip) |  |  |  |  |  |  |  |  |
| 54 | 34 | [Download](54/dataset.zip) |  |  |  |  |  |  |  |  |
| 55 | 33 | [Download](55/dataset.zip) |  |  |  |  |  |  |  |  |
| 56 | 55 | [Download](56/dataset.zip) |  |  |  |  |  |  |  |  |
| 57 | 4546 | [Download](57/dataset.zip) |  |  |  |  |  |  |  |  |
| 58 | 145 | [Download](58/dataset.zip) |  |  |  |  |  |  |  |  |
| 59 | 170 | [Download](59/dataset.zip) |  |  |  |  |  |  |  |  |
| 60 | 28 | [Download](60/dataset.zip) |  |  |  |  |  |  |  |  |
| 61 | 63 | [Download](61/dataset.zip) |  |  |  |  |  |  |  |  |
| 62 | 41 | [Download](62/dataset.zip) |  |  |  |  |  |  |  |  |
| 63 | 49 | [Download](63/dataset.zip) |  |  |  |  |  |  |  |  |
| 64 | 36 | [Download](64/dataset.zip) |  |  |  |  |  |  |  |  |
| 65 | 41 | [Download](65/dataset.zip) |  |  |  |  |  |  |  |  |
| 66 | 25 | [Download](66/dataset.zip) |  |  |  |  |  |  |  |  |
| 67 | 62 | [Download](67/dataset.zip) |  |  |  |  |  |  |  |  |
| 68 | 39 | [Download](68/dataset.zip) |  |  |  |  |  |  |  |  |
| 69 | 40 | [Download](69/dataset.zip) |  |  |  |  |  |  |  |  |
| 70 | 28 | [Download](70/dataset.zip) |  |  |  |  |  |  |  |  |
| 71 | 23 | [Download](71/dataset.zip) |  |  |  |  |  |  |  |  |
| 72 | 96 | [Download](72/dataset.zip) |  |  |  |  |  |  |  |  |
| 73 | 93 | [Download](73/dataset.zip) |  |  |  |  |  |  |  |  |
| 74 | 61 | [Download](74/dataset.zip) |  |  |  |  |  |  |  |  |
| 75 | 29 | [Download](75/dataset.zip) |  |  |  |  |  |  |  |  |
| 76 | 69 | [Download](76/dataset.zip) |  |  |  |  |  |  |  |  |
| 77 | 36 | [Download](77/dataset.zip) |  |  |  |  |  |  |  |  |
| 78 | 55 | [Download](78/dataset.zip) |  |  |  |  |  |  |  |  |
| 79 | 43 | [Download](79/dataset.zip) |  |  |  |  |  |  |  |  |
| 80 | 46 | [Download](80/dataset.zip) |  |  |  |  |  |  |  |  |
| 81 | 32 | [Download](81/dataset.zip) |  |  |  |  |  |  |  |  |
| 82 | 48 | [Download](82/dataset.zip) |  |  |  |  |  |  |  |  |
| 83 | 77 | [Download](83/dataset.zip) |  |  |  |  |  |  |  |  |
| 84 | 35 | [Download](84/dataset.zip) |  |  |  |  |  |  |  |  |
| 85 | 44 | [Download](85/dataset.zip) |  |  |  |  |  |  |  |  |
| 86 | 33 | [Download](86/dataset.zip) |  |  |  |  |  |  |  |  |
| 87 | 38 | [Download](87/dataset.zip) |  |  |  |  |  |  |  |  |
| 88 | 58 | [Download](88/dataset.zip) |  |  |  |  |  |  |  |  |
| 89 | 112 | [Download](89/dataset.zip) |  |  |  |  |  |  |  |  |
| 90 | 33 | [Download](90/dataset.zip) |  |  |  |  |  |  |  |  |
| 91 | 73 | [Download](91/dataset.zip) |  |  |  |  |  |  |  |  |
| 92 | 254 | [Download](92/dataset.zip) |  |  |  |  |  |  |  |  |
| 93 | 68 | [Download](93/dataset.zip) |  |  |  |  |  |  |  |  |
| 94 | 27 | [Download](94/dataset.zip) |  |  |  |  |  |  |  |  |
| 95 | 44 | [Download](95/dataset.zip) |  |  |  |  |  |  |  |  |
| 96 | 88 | [Download](96/dataset.zip) |  |  |  |  |  |  |  |  |
| 97 | 45 | [Download](97/dataset.zip) |  |  |  |  |  |  |  |  |
| 98 | 16 | [Download](98/dataset.zip) |  |  |  |  |  |  |  |  |
| 99 | 266 | [Download](99/dataset.zip) |  |  |  |  |  |  |  |  |
| 100 | 458 | [Download](100/dataset.zip) |  |  |  |  |  |  |  |  |
| 101 | 17 | [Download](101/dataset.zip) |  |  |  |  |  |  |  |  |
| 102 | 67 | [Download](102/dataset.zip) |  |  |  |  |  |  |  |  |
| 103 | 47 | [Download](103/dataset.zip) |  |  |  |  |  |  |  |  |
| 104 | 65 | [Download](104/dataset.zip) |  |  |  |  |  |  |  |  |
| 105 | 22 | [Download](105/dataset.zip) |  |  |  |  |  |  |  |  |
| 106 | 21 | [Download](106/dataset.zip) |  |  |  |  |  |  |  |  |
| 107 | 299 | [Download](107/dataset.zip) |  |  |  |  |  |  |  |  |
| 108 | 19 | [Download](108/dataset.zip) |  |  |  |  |  |  |  |  |
| 109 | 19 | [Download](109/dataset.zip) |  |  |  |  |  |  |  |  |
| 110 | 27 | [Download](110/dataset.zip) |  |  |  |  |  |  |  |  |
| 111 | 24 | [Download](111/dataset.zip) |  |  |  |  |  |  |  |  |
| 112 | 50 | [Download](112/dataset.zip) |  |  |  |  |  |  |  |  |
| 113 | 61 | [Download](113/dataset.zip) |  |  |  |  |  |  |  |  |
| 114 | 39 | [Download](114/dataset.zip) |  |  |  |  |  |  |  |  |
| 115 | 53 | [Download](115/dataset.zip) |  |  |  |  |  |  |  |  |
| 116 | 49 | [Download](116/dataset.zip) |  |  |  |  |  |  |  |  |
| 117 | 36 | [Download](117/dataset.zip) |  |  |  |  |  |  |  |  |
| 118 | 20 | [Download](118/dataset.zip) |  |  |  |  |  |  |  |  |
| 119 | 70 | [Download](119/dataset.zip) |  |  |  |  |  |  |  |  |
| 120 | 23 | [Download](120/dataset.zip) |  |  |  |  |  |  |  |  |
| 121 | 2212 | [Download](121/dataset.zip) |  |  |  |  |  |  |  |  |
| 122 | 87 | [Download](122/dataset.zip) |  |  |  |  |  |  |  |  |
| 123 | 17 | [Download](123/dataset.zip) |  |  |  |  |  |  |  |  |
| 124 | 134 | [Download](124/dataset.zip) |  |  |  |  |  |  |  |  |
| 125 | 64 | [Download](125/dataset.zip) |  |  |  |  |  |  |  |  |
| 126 | 55 | [Download](126/dataset.zip) |  |  |  |  |  |  |  |  |
| 127 | 28 | [Download](127/dataset.zip) |  |  |  |  |  |  |  |  |
| 128 | 31 | [Download](128/dataset.zip) |  |  |  |  |  |  |  |  |
| 129 | 95 | [Download](129/dataset.zip) |  |  |  |  |  |  |  |  |
| 130 | 22 | [Download](130/dataset.zip) |  |  |  |  |  |  |  |  |
| 131 | 19 | [Download](131/dataset.zip) |  |  |  |  |  |  |  |  |
| 132 | 28 | [Download](132/dataset.zip) |  |  |  |  |  |  |  |  |
| 133 | 145 | [Download](133/dataset.zip) |  |  |  |  |  |  |  |  |
| 134 | 256 | [Download](134/dataset.zip) |  |  |  |  |  |  |  |  |
| 135 | 20 | [Download](135/dataset.zip) |  |  |  |  |  |  |  |  |
| 136 | 54 | [Download](136/dataset.zip) |  |  |  |  |  |  |  |  |
| 137 | 82 | [Download](137/dataset.zip) |  |  |  |  |  |  |  |  |
| 138 | 12 | [Download](138/dataset.zip) |  |  |  |  |  |  |  |  |
| 139 | 25 | [Download](139/dataset.zip) |  |  |  |  |  |  |  |  |
| 140 | 42 | [Download](140/dataset.zip) |  |  |  |  |  |  |  |  |
| 141 | 26 | [Download](141/dataset.zip) |  |  |  |  |  |  |  |  |
| 142 | 27 | [Download](142/dataset.zip) |  |  |  |  |  |  |  |  |
| 143 | 22 | [Download](143/dataset.zip) |  |  |  |  |  |  |  |  |
| 144 | 59 | [Download](144/dataset.zip) |  |  |  |  |  |  |  |  |
| 145 | 37 | [Download](145/dataset.zip) |  |  |  |  |  |  |  |  |
| 146 | 47 | [Download](146/dataset.zip) |  |  |  |  |  |  |  |  |
| 147 | 64 | [Download](147/dataset.zip) |  |  |  |  |  |  |  |  |
| 148 | 22 | [Download](148/dataset.zip) |  |  |  |  |  |  |  |  |
| 149 | 12 | [Download](149/dataset.zip) |  |  |  |  |  |  |  |  |
| 150 | 14 | [Download](150/dataset.zip) |  |  |  |  |  |  |  |  |
| 151 | 37 | [Download](151/dataset.zip) |  |  |  |  |  |  |  |  |
| 152 | 11 | [Download](152/dataset.zip) |  |  |  |  |  |  |  |  |
| 153 | 37 | [Download](153/dataset.zip) |  |  |  |  |  |  |  |  |
| 154 | 29 | [Download](154/dataset.zip) |  |  |  |  |  |  |  |  |
| 155 | 32 | [Download](155/dataset.zip) |  |  |  |  |  |  |  |  |
| 156 | 19 | [Download](156/dataset.zip) |  |  |  |  |  |  |  |  |
| 157 | 35 | [Download](157/dataset.zip) |  |  |  |  |  |  |  |  |
| 158 | 51 | [Download](158/dataset.zip) |  |  |  |  |  |  |  |  |
| 159 | 21 | [Download](159/dataset.zip) |  |  |  |  |  |  |  |  |
| 160 | 23 | [Download](160/dataset.zip) |  |  |  |  |  |  |  |  |
| 161 | 30 | [Download](161/dataset.zip) |  |  |  |  |  |  |  |  |
| 162 | 18 | [Download](162/dataset.zip) |  |  |  |  |  |  |  |  |
| 163 | 26 | [Download](163/dataset.zip) |  |  |  |  |  |  |  |  |
| 164 | 24 | [Download](164/dataset.zip) |  |  |  |  |  |  |  |  |
| 165 | 25 | [Download](165/dataset.zip) |  |  |  |  |  |  |  |  |
| 166 | 16 | [Download](166/dataset.zip) |  |  |  |  |  |  |  |  |
| 167 | 39 | [Download](167/dataset.zip) |  |  |  |  |  |  |  |  |
| 168 | 44 | [Download](168/dataset.zip) |  |  |  |  |  |  |  |  |
| 169 | 20 | [Download](169/dataset.zip) |  |  |  |  |  |  |  |  |
| 170 | 26 | [Download](170/dataset.zip) |  |  |  |  |  |  |  |  |
| 171 | 29 | [Download](171/dataset.zip) |  |  |  |  |  |  |  |  |
| 172 | 29 | [Download](172/dataset.zip) |  |  |  |  |  |  |  |  |
| 173 | 12 | [Download](173/dataset.zip) |  |  |  |  |  |  |  |  |
| 174 | 24 | [Download](174/dataset.zip) |  |  |  |  |  |  |  |  |
| 175 | 47 | [Download](175/dataset.zip) |  |  |  |  |  |  |  |  |
| 176 | 16 | [Download](176/dataset.zip) |  |  |  |  |  |  |  |  |
| 177 | 22 | [Download](177/dataset.zip) |  |  |  |  |  |  |  |  |
| 178 | 291 | [Download](178/dataset.zip) |  |  |  |  |  |  |  |  |
| 179 | 21 | [Download](179/dataset.zip) |  |  |  |  |  |  |  |  |
| 180 | 148 | [Download](180/dataset.zip) |  |  |  |  |  |  |  |  |
| 181 | 32 | [Download](181/dataset.zip) |  |  |  |  |  |  |  |  |
| 182 | 31 | [Download](182/dataset.zip) |  |  |  |  |  |  |  |  |
| 183 | 23 | [Download](183/dataset.zip) |  |  |  |  |  |  |  |  |
| 184 | 22 | [Download](184/dataset.zip) |  |  |  |  |  |  |  |  |
| 185 | 16 | [Download](185/dataset.zip) |  |  |  |  |  |  |  |  |
| 186 | 28 | [Download](186/dataset.zip) |  |  |  |  |  |  |  |  |
| 187 | 26 | [Download](187/dataset.zip) |  |  |  |  |  |  |  |  |
| 188 | 21 | [Download](188/dataset.zip) |  |  |  |  |  |  |  |  |
| 189 | 45 | [Download](189/dataset.zip) |  |  |  |  |  |  |  |  |
| 190 | 20 | [Download](190/dataset.zip) |  |  |  |  |  |  |  |  |
| 191 | 24 | [Download](191/dataset.zip) |  |  |  |  |  |  |  |  |
| 192 | 20 | [Download](192/dataset.zip) |  |  |  |  |  |  |  |  |
| 193 | 23 | [Download](193/dataset.zip) |  |  |  |  |  |  |  |  |
| 194 | 33 | [Download](194/dataset.zip) |  |  |  |  |  |  |  |  |
| 195 | 12 | [Download](195/dataset.zip) |  |  |  |  |  |  |  |  |
| 196 | 15 | [Download](196/dataset.zip) |  |  |  |  |  |  |  |  |
| 197 | 9 | [Download](197/dataset.zip) |  |  |  |  |  |  |  |  |
| 198 | 10 | [Download](198/dataset.zip) |  |  |  |  |  |  |  |  |
| 199 | 24 | [Download](199/dataset.zip) |  |  |  |  |  |  |  |  |
| 200 | 24 | [Download](200/dataset.zip) |  |  |  |  |  |  |  |  |
| 201 | 18 | [Download](201/dataset.zip) |  |  |  |  |  |  |  |  |
| 202 | 43 | [Download](202/dataset.zip) |  |  |  |  |  |  |  |  |
| 203 | 15 | [Download](203/dataset.zip) |  |  |  |  |  |  |  |  |
| 204 | 9 | [Download](204/dataset.zip) |  |  |  |  |  |  |  |  |
| 205 | 21 | [Download](205/dataset.zip) |  |  |  |  |  |  |  |  |
| 206 | 24 | [Download](206/dataset.zip) |  |  |  |  |  |  |  |  |
| 207 | 17 | [Download](207/dataset.zip) |  |  |  |  |  |  |  |  |
| 208 | 6 | [Download](208/dataset.zip) |  |  |  |  |  |  | N/A | N/A |
| 209 | 21 | [Download](209/dataset.zip) |  |  |  |  |  |  |  |  |
| 210 | 13 | [Download](210/dataset.zip) |  |  |  |  |  |  |  |  |
| 211 | 8 | [Download](211/dataset.zip) |  |  |  |  |  |  |  |  |
| 212 | 26 | [Download](212/dataset.zip) |  |  |  |  |  |  |  |  |
| 213 | 25 | [Download](213/dataset.zip) |  |  |  |  |  |  |  |  |
| 214 | 12 | [Download](214/dataset.zip) |  |  |  |  |  |  |  |  |
| 215 | 18 | [Download](215/dataset.zip) |  |  |  |  |  |  |  |  |
| 216 | 6 | [Download](216/dataset.zip) |  |  |  |  |  |  | N/A | N/A |
| 217 | 5 | [Download](217/dataset.zip) |  |  |  |  |  | N/A | N/A | N/A |
| 218 | 7 | [Download](218/dataset.zip) |  |  |  |  |  |  |  | N/A |
| 219 | 9 | [Download](219/dataset.zip) |  |  |  |  |  |  |  |  |
| 220 | 10 | [Download](220/dataset.zip) |  |  |  |  |  |  |  |  |
| 221 | 27 | [Download](221/dataset.zip) |  |  |  |  |  |  |  |  |
| 222 | 9 | [Download](222/dataset.zip) |  |  |  |  |  |  |  |  |
| 223 | 8 | [Download](223/dataset.zip) |  |  |  |  |  |  |  |  |
| noise | 484 | [Download](-1/dataset.zip) |  |  |  |  |  |  |  |  |
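For convenience, here is a minimal sketch of fetching and unpacking one character's archive (assuming the per-character `N/dataset.zip` layout shown by the download links above; `character_id` and the output directory name are illustrative):

```python
from zipfile import ZipFile

from huggingface_hub import hf_hub_download

character_id = 131  # any id from the preview table above; -1 holds the noise split
archive_path = hf_hub_download(
    repo_id="BangumiBase/gintama",
    filename=f"{character_id}/dataset.zip",
    repo_type="dataset",
)

# Extract the images so potentially noisy samples can be reviewed before training.
with ZipFile(archive_path) as archive:
    archive.extractall(f"gintama_{character_id}")
```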
| BangumiBase/gintama | [
"size_categories:10K<n<100K",
"license:mit",
"art",
"region:us"
]
| 2023-11-12T18:12:25+00:00 | {"license": "mit", "size_categories": ["10K<n<100K"], "tags": ["art"]} | 2023-11-13T10:01:43+00:00 | []
| []
| TAGS
#size_categories-10K<n<100K #license-mit #art #region-us
| Bangumi Image Base of Gintama
=============================
This is the image base of bangumi Gintama; we detected 225 characters and 31,678 images in total. The full dataset is here.
Please note that these image bases are not guaranteed to be 100% clean; they may contain noisy samples. If you intend to manually train models using this dataset, we recommend performing the necessary preprocessing on the downloaded dataset to eliminate potentially noisy samples (approximately 1% probability).
Here is the characters' preview:
| []
| [
"TAGS\n#size_categories-10K<n<100K #license-mit #art #region-us \n"
]
| [
25
]
| [
"passage: TAGS\n#size_categories-10K<n<100K #license-mit #art #region-us \n"
]
|
0210646117e9422eef46088baacdc6eecafa6d4b |
# Dataset Card for Evaluation run of uukuguy/speechless-mistral-six-in-one-7b
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/uukuguy/speechless-mistral-six-in-one-7b
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]
### Dataset Summary
Dataset automatically created during the evaluation run of model [uukuguy/speechless-mistral-six-in-one-7b](https://huggingface.co/uukuguy/speechless-mistral-six-in-one-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
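# Per-sample details for one task/config; here: 5-shot Winogrande.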
data = load_dataset("open-llm-leaderboard/details_uukuguy__speechless-mistral-six-in-one-7b_public",
"harness_winogrande_5",
split="train")
```
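Similarly, the aggregated metrics mentioned above can be loaded from the "results" configuration (a sketch; the config and split names are taken from this card rather than verified against the repo):

```python
from datasets import load_dataset

# "results" stores the aggregated metrics of the run; "train" points to the latest.
results = load_dataset(
    "open-llm-leaderboard/details_uukuguy__speechless-mistral-six-in-one-7b_public",
    "results",
    split="train",
)
```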
## Latest results
These are the [latest results from run 2023-11-12T18:14:50.698039](https://huggingface.co/datasets/open-llm-leaderboard/details_uukuguy__speechless-mistral-six-in-one-7b_public/blob/main/results_2023-11-12T18-14-50.698039.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and in the "latest" split of each eval):
```python
{
"all": {
"acc": 0.6276350372644707,
"acc_stderr": 0.03243221410411415,
"acc_norm": 0.636467872903276,
"acc_norm_stderr": 0.03312856166774958,
"mc1": 0.40514075887392903,
"mc1_stderr": 0.017185611727753368,
"mc2": 0.5776708582574724,
"mc2_stderr": 0.01544223129155929,
"em": 0.0041946308724832215,
"em_stderr": 0.0006618716168266571,
"f1": 0.09125943791946291,
"f1_stderr": 0.0018243790800558358
},
"harness|arc:challenge|25": {
"acc": 0.5981228668941979,
"acc_stderr": 0.014327268614578276,
"acc_norm": 0.6296928327645052,
"acc_norm_stderr": 0.01411129875167495
},
"harness|hellaswag|10": {
"acc": 0.652459669388568,
"acc_stderr": 0.004752158936871872,
"acc_norm": 0.8460466042620992,
"acc_norm_stderr": 0.00360166483871892
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.26,
"acc_stderr": 0.0440844002276808,
"acc_norm": 0.26,
"acc_norm_stderr": 0.0440844002276808
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6222222222222222,
"acc_stderr": 0.04188307537595853,
"acc_norm": 0.6222222222222222,
"acc_norm_stderr": 0.04188307537595853
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.6578947368421053,
"acc_stderr": 0.03860731599316092,
"acc_norm": 0.6578947368421053,
"acc_norm_stderr": 0.03860731599316092
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.58,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.58,
"acc_norm_stderr": 0.049604496374885836
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.6792452830188679,
"acc_stderr": 0.02872750295788027,
"acc_norm": 0.6792452830188679,
"acc_norm_stderr": 0.02872750295788027
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7361111111111112,
"acc_stderr": 0.03685651095897532,
"acc_norm": 0.7361111111111112,
"acc_norm_stderr": 0.03685651095897532
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956911,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956911
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.53,
"acc_stderr": 0.050161355804659205,
"acc_norm": 0.53,
"acc_norm_stderr": 0.050161355804659205
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.38,
"acc_stderr": 0.04878317312145633,
"acc_norm": 0.38,
"acc_norm_stderr": 0.04878317312145633
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6184971098265896,
"acc_stderr": 0.03703851193099521,
"acc_norm": 0.6184971098265896,
"acc_norm_stderr": 0.03703851193099521
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.3627450980392157,
"acc_stderr": 0.04784060704105654,
"acc_norm": 0.3627450980392157,
"acc_norm_stderr": 0.04784060704105654
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.78,
"acc_stderr": 0.04163331998932261,
"acc_norm": 0.78,
"acc_norm_stderr": 0.04163331998932261
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5319148936170213,
"acc_stderr": 0.03261936918467382,
"acc_norm": 0.5319148936170213,
"acc_norm_stderr": 0.03261936918467382
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.45614035087719296,
"acc_stderr": 0.04685473041907789,
"acc_norm": 0.45614035087719296,
"acc_norm_stderr": 0.04685473041907789
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.6,
"acc_stderr": 0.040824829046386284,
"acc_norm": 0.6,
"acc_norm_stderr": 0.040824829046386284
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.4074074074074074,
"acc_stderr": 0.025305906241590626,
"acc_norm": 0.4074074074074074,
"acc_norm_stderr": 0.025305906241590626
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.4523809523809524,
"acc_stderr": 0.044518079590553275,
"acc_norm": 0.4523809523809524,
"acc_norm_stderr": 0.044518079590553275
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7677419354838709,
"acc_stderr": 0.024022256130308235,
"acc_norm": 0.7677419354838709,
"acc_norm_stderr": 0.024022256130308235
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.5270935960591133,
"acc_stderr": 0.03512819077876106,
"acc_norm": 0.5270935960591133,
"acc_norm_stderr": 0.03512819077876106
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.67,
"acc_stderr": 0.04725815626252607,
"acc_norm": 0.67,
"acc_norm_stderr": 0.04725815626252607
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7575757575757576,
"acc_stderr": 0.03346409881055953,
"acc_norm": 0.7575757575757576,
"acc_norm_stderr": 0.03346409881055953
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7828282828282829,
"acc_stderr": 0.02937661648494563,
"acc_norm": 0.7828282828282829,
"acc_norm_stderr": 0.02937661648494563
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8704663212435233,
"acc_stderr": 0.024233532297758723,
"acc_norm": 0.8704663212435233,
"acc_norm_stderr": 0.024233532297758723
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6410256410256411,
"acc_stderr": 0.024321738484602354,
"acc_norm": 0.6410256410256411,
"acc_norm_stderr": 0.024321738484602354
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.34814814814814815,
"acc_stderr": 0.029045600290616255,
"acc_norm": 0.34814814814814815,
"acc_norm_stderr": 0.029045600290616255
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6764705882352942,
"acc_stderr": 0.030388353551886783,
"acc_norm": 0.6764705882352942,
"acc_norm_stderr": 0.030388353551886783
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.3509933774834437,
"acc_stderr": 0.03896981964257375,
"acc_norm": 0.3509933774834437,
"acc_norm_stderr": 0.03896981964257375
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8385321100917431,
"acc_stderr": 0.015776239256163224,
"acc_norm": 0.8385321100917431,
"acc_norm_stderr": 0.015776239256163224
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.49537037037037035,
"acc_stderr": 0.03409825519163572,
"acc_norm": 0.49537037037037035,
"acc_norm_stderr": 0.03409825519163572
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.7990196078431373,
"acc_stderr": 0.028125972265654373,
"acc_norm": 0.7990196078431373,
"acc_norm_stderr": 0.028125972265654373
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.7637130801687764,
"acc_stderr": 0.02765215314415927,
"acc_norm": 0.7637130801687764,
"acc_norm_stderr": 0.02765215314415927
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6905829596412556,
"acc_stderr": 0.03102441174057221,
"acc_norm": 0.6905829596412556,
"acc_norm_stderr": 0.03102441174057221
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.7557251908396947,
"acc_stderr": 0.03768335959728742,
"acc_norm": 0.7557251908396947,
"acc_norm_stderr": 0.03768335959728742
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7851239669421488,
"acc_stderr": 0.037494924487096966,
"acc_norm": 0.7851239669421488,
"acc_norm_stderr": 0.037494924487096966
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7870370370370371,
"acc_stderr": 0.03957835471980979,
"acc_norm": 0.7870370370370371,
"acc_norm_stderr": 0.03957835471980979
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.754601226993865,
"acc_stderr": 0.03380939813943354,
"acc_norm": 0.754601226993865,
"acc_norm_stderr": 0.03380939813943354
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.4642857142857143,
"acc_stderr": 0.04733667890053756,
"acc_norm": 0.4642857142857143,
"acc_norm_stderr": 0.04733667890053756
},
"harness|hendrycksTest-management|5": {
"acc": 0.7766990291262136,
"acc_stderr": 0.04123553189891431,
"acc_norm": 0.7766990291262136,
"acc_norm_stderr": 0.04123553189891431
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8888888888888888,
"acc_stderr": 0.020588491316092375,
"acc_norm": 0.8888888888888888,
"acc_norm_stderr": 0.020588491316092375
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.71,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.71,
"acc_norm_stderr": 0.045604802157206845
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8122605363984674,
"acc_stderr": 0.013964393769899136,
"acc_norm": 0.8122605363984674,
"acc_norm_stderr": 0.013964393769899136
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.708092485549133,
"acc_stderr": 0.024476994076247337,
"acc_norm": 0.708092485549133,
"acc_norm_stderr": 0.024476994076247337
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.41899441340782123,
"acc_stderr": 0.016501579306861677,
"acc_norm": 0.41899441340782123,
"acc_norm_stderr": 0.016501579306861677
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.7091503267973857,
"acc_stderr": 0.02600480036395213,
"acc_norm": 0.7091503267973857,
"acc_norm_stderr": 0.02600480036395213
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.6784565916398714,
"acc_stderr": 0.026527724079528872,
"acc_norm": 0.6784565916398714,
"acc_norm_stderr": 0.026527724079528872
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7006172839506173,
"acc_stderr": 0.025483115601195455,
"acc_norm": 0.7006172839506173,
"acc_norm_stderr": 0.025483115601195455
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.4574468085106383,
"acc_stderr": 0.029719281272236855,
"acc_norm": 0.4574468085106383,
"acc_norm_stderr": 0.029719281272236855
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.455019556714472,
"acc_stderr": 0.012718456618701773,
"acc_norm": 0.455019556714472,
"acc_norm_stderr": 0.012718456618701773
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6544117647058824,
"acc_stderr": 0.02888819310398863,
"acc_norm": 0.6544117647058824,
"acc_norm_stderr": 0.02888819310398863
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6388888888888888,
"acc_stderr": 0.01943177567703731,
"acc_norm": 0.6388888888888888,
"acc_norm_stderr": 0.01943177567703731
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6727272727272727,
"acc_stderr": 0.04494290866252089,
"acc_norm": 0.6727272727272727,
"acc_norm_stderr": 0.04494290866252089
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7224489795918367,
"acc_stderr": 0.028666857790274648,
"acc_norm": 0.7224489795918367,
"acc_norm_stderr": 0.028666857790274648
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8507462686567164,
"acc_stderr": 0.02519692987482707,
"acc_norm": 0.8507462686567164,
"acc_norm_stderr": 0.02519692987482707
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.82,
"acc_stderr": 0.03861229196653694,
"acc_norm": 0.82,
"acc_norm_stderr": 0.03861229196653694
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5421686746987951,
"acc_stderr": 0.0387862677100236,
"acc_norm": 0.5421686746987951,
"acc_norm_stderr": 0.0387862677100236
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.847953216374269,
"acc_stderr": 0.02753912288906145,
"acc_norm": 0.847953216374269,
"acc_norm_stderr": 0.02753912288906145
},
"harness|truthfulqa:mc|0": {
"mc1": 0.40514075887392903,
"mc1_stderr": 0.017185611727753368,
"mc2": 0.5776708582574724,
"mc2_stderr": 0.01544223129155929
},
"harness|winogrande|5": {
"acc": 0.7750591949486977,
"acc_stderr": 0.011735043564126735
},
"harness|drop|3": {
"em": 0.0041946308724832215,
"em_stderr": 0.0006618716168266571,
"f1": 0.09125943791946291,
"f1_stderr": 0.0018243790800558358
},
"harness|gsm8k|5": {
"acc": 0.18423047763457165,
"acc_stderr": 0.010678414428555008
}
}
```
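As a quick sanity check, the JSON above can be post-processed directly; a minimal sketch (assuming the snippet has been saved locally as `results.json`, a filename chosen here for illustration):

```python
import json

with open("results.json") as f:
    results = json.load(f)

# Macro-average accuracy over the 57 MMLU (hendrycksTest) subtasks.
mmlu_accs = [
    scores["acc"]
    for task, scores in results.items()
    if task.startswith("harness|hendrycksTest")
]
print(f"MMLU subtasks: {len(mmlu_accs)}, macro-average acc: {sum(mmlu_accs) / len(mmlu_accs):.4f}")
```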
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] | open-llm-leaderboard/details_uukuguy__speechless-mistral-six-in-one-7b | [
"region:us"
]
| 2023-11-12T18:17:49+00:00 | {"pretty_name": "Evaluation run of uukuguy/speechless-mistral-six-in-one-7b", "dataset_summary": "Dataset automatically created during the evaluation run of model [uukuguy/speechless-mistral-six-in-one-7b](https://huggingface.co/uukuguy/speechless-mistral-six-in-one-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_uukuguy__speechless-mistral-six-in-one-7b_public\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-11-12T18:14:50.698039](https://huggingface.co/datasets/open-llm-leaderboard/details_uukuguy__speechless-mistral-six-in-one-7b_public/blob/main/results_2023-11-12T18-14-50.698039.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6276350372644707,\n \"acc_stderr\": 0.03243221410411415,\n \"acc_norm\": 0.636467872903276,\n \"acc_norm_stderr\": 0.03312856166774958,\n \"mc1\": 0.40514075887392903,\n \"mc1_stderr\": 0.017185611727753368,\n \"mc2\": 0.5776708582574724,\n \"mc2_stderr\": 0.01544223129155929,\n \"em\": 0.0041946308724832215,\n \"em_stderr\": 0.0006618716168266571,\n \"f1\": 0.09125943791946291,\n \"f1_stderr\": 0.0018243790800558358\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5981228668941979,\n \"acc_stderr\": 0.014327268614578276,\n \"acc_norm\": 0.6296928327645052,\n \"acc_norm_stderr\": 0.01411129875167495\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.652459669388568,\n \"acc_stderr\": 0.004752158936871872,\n \"acc_norm\": 0.8460466042620992,\n \"acc_norm_stderr\": 0.00360166483871892\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6222222222222222,\n \"acc_stderr\": 0.04188307537595853,\n \"acc_norm\": 0.6222222222222222,\n \"acc_norm_stderr\": 0.04188307537595853\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6578947368421053,\n \"acc_stderr\": 0.03860731599316092,\n \"acc_norm\": 0.6578947368421053,\n \"acc_norm_stderr\": 0.03860731599316092\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.58,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6792452830188679,\n \"acc_stderr\": 0.02872750295788027,\n \"acc_norm\": 0.6792452830188679,\n \"acc_norm_stderr\": 0.02872750295788027\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7361111111111112,\n 
\"acc_stderr\": 0.03685651095897532,\n \"acc_norm\": 0.7361111111111112,\n \"acc_norm_stderr\": 0.03685651095897532\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956911,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956911\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145633,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145633\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6184971098265896,\n \"acc_stderr\": 0.03703851193099521,\n \"acc_norm\": 0.6184971098265896,\n \"acc_norm_stderr\": 0.03703851193099521\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3627450980392157,\n \"acc_stderr\": 0.04784060704105654,\n \"acc_norm\": 0.3627450980392157,\n \"acc_norm_stderr\": 0.04784060704105654\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932261,\n \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.04163331998932261\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5319148936170213,\n \"acc_stderr\": 0.03261936918467382,\n \"acc_norm\": 0.5319148936170213,\n \"acc_norm_stderr\": 0.03261936918467382\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.45614035087719296,\n \"acc_stderr\": 0.04685473041907789,\n \"acc_norm\": 0.45614035087719296,\n \"acc_norm_stderr\": 0.04685473041907789\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.040824829046386284,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.040824829046386284\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4074074074074074,\n \"acc_stderr\": 0.025305906241590626,\n \"acc_norm\": 0.4074074074074074,\n \"acc_norm_stderr\": 0.025305906241590626\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4523809523809524,\n \"acc_stderr\": 0.044518079590553275,\n \"acc_norm\": 0.4523809523809524,\n \"acc_norm_stderr\": 0.044518079590553275\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7677419354838709,\n \"acc_stderr\": 0.024022256130308235,\n \"acc_norm\": 0.7677419354838709,\n \"acc_norm_stderr\": 0.024022256130308235\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5270935960591133,\n \"acc_stderr\": 0.03512819077876106,\n \"acc_norm\": 0.5270935960591133,\n \"acc_norm_stderr\": 0.03512819077876106\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.04725815626252607,\n \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.04725815626252607\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7575757575757576,\n \"acc_stderr\": 0.03346409881055953,\n \"acc_norm\": 0.7575757575757576,\n \"acc_norm_stderr\": 0.03346409881055953\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7828282828282829,\n \"acc_stderr\": 0.02937661648494563,\n \"acc_norm\": 0.7828282828282829,\n \"acc_norm_stderr\": 0.02937661648494563\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 
0.8704663212435233,\n \"acc_stderr\": 0.024233532297758723,\n \"acc_norm\": 0.8704663212435233,\n \"acc_norm_stderr\": 0.024233532297758723\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6410256410256411,\n \"acc_stderr\": 0.024321738484602354,\n \"acc_norm\": 0.6410256410256411,\n \"acc_norm_stderr\": 0.024321738484602354\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34814814814814815,\n \"acc_stderr\": 0.029045600290616255,\n \"acc_norm\": 0.34814814814814815,\n \"acc_norm_stderr\": 0.029045600290616255\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.030388353551886783,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.030388353551886783\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3509933774834437,\n \"acc_stderr\": 0.03896981964257375,\n \"acc_norm\": 0.3509933774834437,\n \"acc_norm_stderr\": 0.03896981964257375\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8385321100917431,\n \"acc_stderr\": 0.015776239256163224,\n \"acc_norm\": 0.8385321100917431,\n \"acc_norm_stderr\": 0.015776239256163224\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.49537037037037035,\n \"acc_stderr\": 0.03409825519163572,\n \"acc_norm\": 0.49537037037037035,\n \"acc_norm_stderr\": 0.03409825519163572\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7990196078431373,\n \"acc_stderr\": 0.028125972265654373,\n \"acc_norm\": 0.7990196078431373,\n \"acc_norm_stderr\": 0.028125972265654373\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7637130801687764,\n \"acc_stderr\": 0.02765215314415927,\n \"acc_norm\": 0.7637130801687764,\n \"acc_norm_stderr\": 0.02765215314415927\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6905829596412556,\n \"acc_stderr\": 0.03102441174057221,\n \"acc_norm\": 0.6905829596412556,\n \"acc_norm_stderr\": 0.03102441174057221\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7557251908396947,\n \"acc_stderr\": 0.03768335959728742,\n \"acc_norm\": 0.7557251908396947,\n \"acc_norm_stderr\": 0.03768335959728742\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7870370370370371,\n \"acc_stderr\": 0.03957835471980979,\n \"acc_norm\": 0.7870370370370371,\n \"acc_norm_stderr\": 0.03957835471980979\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.754601226993865,\n \"acc_stderr\": 0.03380939813943354,\n \"acc_norm\": 0.754601226993865,\n \"acc_norm_stderr\": 0.03380939813943354\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4642857142857143,\n \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.4642857142857143,\n \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8888888888888888,\n \"acc_stderr\": 0.020588491316092375,\n \"acc_norm\": 0.8888888888888888,\n \"acc_norm_stderr\": 0.020588491316092375\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 
0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8122605363984674,\n \"acc_stderr\": 0.013964393769899136,\n \"acc_norm\": 0.8122605363984674,\n \"acc_norm_stderr\": 0.013964393769899136\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.708092485549133,\n \"acc_stderr\": 0.024476994076247337,\n \"acc_norm\": 0.708092485549133,\n \"acc_norm_stderr\": 0.024476994076247337\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.41899441340782123,\n \"acc_stderr\": 0.016501579306861677,\n \"acc_norm\": 0.41899441340782123,\n \"acc_norm_stderr\": 0.016501579306861677\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7091503267973857,\n \"acc_stderr\": 0.02600480036395213,\n \"acc_norm\": 0.7091503267973857,\n \"acc_norm_stderr\": 0.02600480036395213\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6784565916398714,\n \"acc_stderr\": 0.026527724079528872,\n \"acc_norm\": 0.6784565916398714,\n \"acc_norm_stderr\": 0.026527724079528872\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7006172839506173,\n \"acc_stderr\": 0.025483115601195455,\n \"acc_norm\": 0.7006172839506173,\n \"acc_norm_stderr\": 0.025483115601195455\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4574468085106383,\n \"acc_stderr\": 0.029719281272236855,\n \"acc_norm\": 0.4574468085106383,\n \"acc_norm_stderr\": 0.029719281272236855\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.455019556714472,\n \"acc_stderr\": 0.012718456618701773,\n \"acc_norm\": 0.455019556714472,\n \"acc_norm_stderr\": 0.012718456618701773\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6544117647058824,\n \"acc_stderr\": 0.02888819310398863,\n \"acc_norm\": 0.6544117647058824,\n \"acc_norm_stderr\": 0.02888819310398863\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6388888888888888,\n \"acc_stderr\": 0.01943177567703731,\n \"acc_norm\": 0.6388888888888888,\n \"acc_norm_stderr\": 0.01943177567703731\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.04494290866252089,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.04494290866252089\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7224489795918367,\n \"acc_stderr\": 0.028666857790274648,\n \"acc_norm\": 0.7224489795918367,\n \"acc_norm_stderr\": 0.028666857790274648\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8507462686567164,\n \"acc_stderr\": 0.02519692987482707,\n \"acc_norm\": 0.8507462686567164,\n \"acc_norm_stderr\": 0.02519692987482707\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.82,\n \"acc_stderr\": 0.03861229196653694,\n \"acc_norm\": 0.82,\n \"acc_norm_stderr\": 0.03861229196653694\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5421686746987951,\n \"acc_stderr\": 0.0387862677100236,\n \"acc_norm\": 0.5421686746987951,\n \"acc_norm_stderr\": 0.0387862677100236\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.847953216374269,\n \"acc_stderr\": 0.02753912288906145,\n \"acc_norm\": 0.847953216374269,\n \"acc_norm_stderr\": 0.02753912288906145\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.40514075887392903,\n \"mc1_stderr\": 0.017185611727753368,\n \"mc2\": 0.5776708582574724,\n \"mc2_stderr\": 0.01544223129155929\n },\n \"harness|winogrande|5\": {\n \"acc\": 
0.7750591949486977,\n \"acc_stderr\": 0.011735043564126735\n },\n \"harness|drop|3\": {\n \"em\": 0.0041946308724832215,\n \"em_stderr\": 0.0006618716168266571,\n \"f1\": 0.09125943791946291,\n \"f1_stderr\": 0.0018243790800558358\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.18423047763457165,\n \"acc_stderr\": 0.010678414428555008\n }\n}\n```", "repo_url": "https://huggingface.co/uukuguy/speechless-mistral-six-in-one-7b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|arc:challenge|25_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|drop|3_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|gsm8k|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hellaswag|10_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T18-14-50.698039.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-management|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-virology|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T18-14-50.698039.parquet", 
"**/details_harness|hendrycksTest-management|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-virology|5_2023-11-12T18-14-50.698039.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": 
["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": 
["**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-management|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|hendrycksTest-virology|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": 
["**/details_harness|hendrycksTest-world_religions|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|truthfulqa:mc|0_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["**/details_harness|winogrande|5_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-11-12T18-14-50.698039.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_11_12T18_14_50.698039", "path": ["results_2023-11-12T18-14-50.698039.parquet"]}, {"split": "latest", "path": ["results_2023-11-12T18-14-50.698039.parquet"]}]}]} | 2023-11-12T18:18:50+00:00 | []
| []
| TAGS
#region-us
|
# Dataset Card for Evaluation run of uukuguy/speechless-mistral-six-in-one-7b
## Dataset Description
- Homepage:
- Repository: URL
- Paper:
- Leaderboard: URL
- Point of Contact: clementine@URL
### Dataset Summary
Dataset automatically created during the evaluation run of model uukuguy/speechless-mistral-six-in-one-7b on the Open LLM Leaderboard.
The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).
To load the details from a run, you can for instance do the following:
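A minimal sketch of that load call (the code block was stripped from this processed text; the dataset path, including the `_public` suffix, is assumed from the naming pattern of these leaderboard repos and may need adjusting):

```python
from datasets import load_dataset

# Assumed dataset path inferred from the repo naming convention.
data = load_dataset(
    "open-llm-leaderboard/details_uukuguy__speechless-mistral-six-in-one-7b_public",
    "harness_winogrande_5",
    split="train",
)
```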
## Latest results
These are the latest results from run 2023-11-12T18:14:50.698039 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):
### Supported Tasks and Leaderboards
### Languages
## Dataset Structure
### Data Instances
### Data Fields
### Data Splits
## Dataset Creation
### Curation Rationale
### Source Data
#### Initial Data Collection and Normalization
#### Who are the source language producers?
### Annotations
#### Annotation process
#### Who are the annotators?
### Personal and Sensitive Information
## Considerations for Using the Data
### Social Impact of Dataset
### Discussion of Biases
### Other Known Limitations
## Additional Information
### Dataset Curators
### Licensing Information
### Contributions
| [
"# Dataset Card for Evaluation run of uukuguy/speechless-mistral-six-in-one-7b",
"## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL",
"### Dataset Summary\n\nDataset automatically created during the evaluation run of model uukuguy/speechless-mistral-six-in-one-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:",
"## Latest results\n\nThese are the latest results from run 2023-11-12T18:14:50.698039(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):",
"### Supported Tasks and Leaderboards",
"### Languages",
"## Dataset Structure",
"### Data Instances",
"### Data Fields",
"### Data Splits",
"## Dataset Creation",
"### Curation Rationale",
"### Source Data",
"#### Initial Data Collection and Normalization",
"#### Who are the source language producers?",
"### Annotations",
"#### Annotation process",
"#### Who are the annotators?",
"### Personal and Sensitive Information",
"## Considerations for Using the Data",
"### Social Impact of Dataset",
"### Discussion of Biases",
"### Other Known Limitations",
"## Additional Information",
"### Dataset Curators",
"### Licensing Information",
"### Contributions"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for Evaluation run of uukuguy/speechless-mistral-six-in-one-7b",
"## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL",
"### Dataset Summary\n\nDataset automatically created during the evaluation run of model uukuguy/speechless-mistral-six-in-one-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:",
"## Latest results\n\nThese are the latest results from run 2023-11-12T18:14:50.698039(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):",
"### Supported Tasks and Leaderboards",
"### Languages",
"## Dataset Structure",
"### Data Instances",
"### Data Fields",
"### Data Splits",
"## Dataset Creation",
"### Curation Rationale",
"### Source Data",
"#### Initial Data Collection and Normalization",
"#### Who are the source language producers?",
"### Annotations",
"#### Annotation process",
"#### Who are the annotators?",
"### Personal and Sensitive Information",
"## Considerations for Using the Data",
"### Social Impact of Dataset",
"### Discussion of Biases",
"### Other Known Limitations",
"## Additional Information",
"### Dataset Curators",
"### Licensing Information",
"### Contributions"
]
| [
6,
29,
31,
178,
67,
10,
4,
6,
6,
5,
5,
5,
7,
4,
10,
10,
5,
5,
9,
8,
8,
7,
8,
7,
5,
6,
6,
5
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of uukuguy/speechless-mistral-six-in-one-7b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model uukuguy/speechless-mistral-six-in-one-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-11-12T18:14:50.698039(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions"
]
|
39dedd3569065ff4fded0db57d4c7f3014537fd5 |
# Dataset Card for Evaluation run of PulsarAI/SlimOpenOrca-Mistral-7B-v2
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/PulsarAI/SlimOpenOrca-Mistral-7B-v2
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]
### Dataset Summary
Dataset automatically created during the evaluation run of model [PulsarAI/SlimOpenOrca-Mistral-7B-v2](https://huggingface.co/PulsarAI/SlimOpenOrca-Mistral-7B-v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_PulsarAI__SlimOpenOrca-Mistral-7B-v2_public",
"harness_winogrande_5",
split="train")
```
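The aggregated metrics can be pulled the same way through the "results" configuration; a short sketch (the config name and "latest" split are taken from this record's own configs metadata):

```python
from datasets import load_dataset

# Load the aggregated per-task metrics rather than the per-sample details.
results = load_dataset(
    "open-llm-leaderboard/details_PulsarAI__SlimOpenOrca-Mistral-7B-v2_public",
    "results",
    split="latest",
)
```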
## Latest results
These are the [latest results from run 2023-11-12T18:15:51.369317](https://huggingface.co/datasets/open-llm-leaderboard/details_PulsarAI__SlimOpenOrca-Mistral-7B-v2_public/blob/main/results_2023-11-12T18-15-51.369317.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.6159393027066592,
"acc_stderr": 0.032593338844127864,
"acc_norm": 0.6242559279403389,
"acc_norm_stderr": 0.03329458303258477,
"mc1": 0.3929008567931457,
"mc1_stderr": 0.017097248285233065,
"mc2": 0.5664808334981362,
"mc2_stderr": 0.015491636686254535,
"em": 0.004718959731543624,
"em_stderr": 0.0007018360183131115,
"f1": 0.09190750838926176,
"f1_stderr": 0.0018302287340192876
},
"harness|arc:challenge|25": {
"acc": 0.5938566552901023,
"acc_stderr": 0.014351656690097858,
"acc_norm": 0.628839590443686,
"acc_norm_stderr": 0.014117971901142824
},
"harness|hellaswag|10": {
"acc": 0.6448914558852819,
"acc_stderr": 0.004775681871529862,
"acc_norm": 0.8340967934674368,
"acc_norm_stderr": 0.003712334763856884
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542128,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542128
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.5851851851851851,
"acc_stderr": 0.04256193767901408,
"acc_norm": 0.5851851851851851,
"acc_norm_stderr": 0.04256193767901408
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.6907894736842105,
"acc_stderr": 0.037610708698674805,
"acc_norm": 0.6907894736842105,
"acc_norm_stderr": 0.037610708698674805
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.61,
"acc_stderr": 0.04902071300001975,
"acc_norm": 0.61,
"acc_norm_stderr": 0.04902071300001975
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.6754716981132075,
"acc_stderr": 0.028815615713432108,
"acc_norm": 0.6754716981132075,
"acc_norm_stderr": 0.028815615713432108
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7361111111111112,
"acc_stderr": 0.03685651095897532,
"acc_norm": 0.7361111111111112,
"acc_norm_stderr": 0.03685651095897532
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.51,
"acc_stderr": 0.05024183937956912,
"acc_norm": 0.51,
"acc_norm_stderr": 0.05024183937956912
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.53,
"acc_stderr": 0.05016135580465919,
"acc_norm": 0.53,
"acc_norm_stderr": 0.05016135580465919
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.37,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.37,
"acc_norm_stderr": 0.04852365870939099
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.5549132947976878,
"acc_stderr": 0.03789401760283648,
"acc_norm": 0.5549132947976878,
"acc_norm_stderr": 0.03789401760283648
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.38235294117647056,
"acc_stderr": 0.04835503696107223,
"acc_norm": 0.38235294117647056,
"acc_norm_stderr": 0.04835503696107223
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.77,
"acc_stderr": 0.04229525846816505,
"acc_norm": 0.77,
"acc_norm_stderr": 0.04229525846816505
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5319148936170213,
"acc_stderr": 0.03261936918467381,
"acc_norm": 0.5319148936170213,
"acc_norm_stderr": 0.03261936918467381
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.42105263157894735,
"acc_stderr": 0.046446020912223177,
"acc_norm": 0.42105263157894735,
"acc_norm_stderr": 0.046446020912223177
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5586206896551724,
"acc_stderr": 0.04137931034482758,
"acc_norm": 0.5586206896551724,
"acc_norm_stderr": 0.04137931034482758
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.41534391534391535,
"acc_stderr": 0.02537952491077839,
"acc_norm": 0.41534391534391535,
"acc_norm_stderr": 0.02537952491077839
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.4523809523809524,
"acc_stderr": 0.044518079590553275,
"acc_norm": 0.4523809523809524,
"acc_norm_stderr": 0.044518079590553275
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.27,
"acc_stderr": 0.0446196043338474,
"acc_norm": 0.27,
"acc_norm_stderr": 0.0446196043338474
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7483870967741936,
"acc_stderr": 0.024685979286239963,
"acc_norm": 0.7483870967741936,
"acc_norm_stderr": 0.024685979286239963
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.4630541871921182,
"acc_stderr": 0.035083705204426656,
"acc_norm": 0.4630541871921182,
"acc_norm_stderr": 0.035083705204426656
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.72,
"acc_stderr": 0.04512608598542127,
"acc_norm": 0.72,
"acc_norm_stderr": 0.04512608598542127
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7575757575757576,
"acc_stderr": 0.03346409881055953,
"acc_norm": 0.7575757575757576,
"acc_norm_stderr": 0.03346409881055953
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7575757575757576,
"acc_stderr": 0.030532892233932022,
"acc_norm": 0.7575757575757576,
"acc_norm_stderr": 0.030532892233932022
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8549222797927462,
"acc_stderr": 0.02541634309630645,
"acc_norm": 0.8549222797927462,
"acc_norm_stderr": 0.02541634309630645
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.5974358974358974,
"acc_stderr": 0.02486499515976775,
"acc_norm": 0.5974358974358974,
"acc_norm_stderr": 0.02486499515976775
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.35555555555555557,
"acc_stderr": 0.029185714949857413,
"acc_norm": 0.35555555555555557,
"acc_norm_stderr": 0.029185714949857413
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6386554621848739,
"acc_stderr": 0.031204691225150016,
"acc_norm": 0.6386554621848739,
"acc_norm_stderr": 0.031204691225150016
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.271523178807947,
"acc_stderr": 0.03631329803969653,
"acc_norm": 0.271523178807947,
"acc_norm_stderr": 0.03631329803969653
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8311926605504587,
"acc_stderr": 0.01606005626853035,
"acc_norm": 0.8311926605504587,
"acc_norm_stderr": 0.01606005626853035
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.49074074074074076,
"acc_stderr": 0.034093869469927006,
"acc_norm": 0.49074074074074076,
"acc_norm_stderr": 0.034093869469927006
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.8137254901960784,
"acc_stderr": 0.027325470966716312,
"acc_norm": 0.8137254901960784,
"acc_norm_stderr": 0.027325470966716312
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.7890295358649789,
"acc_stderr": 0.02655837250266192,
"acc_norm": 0.7890295358649789,
"acc_norm_stderr": 0.02655837250266192
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6547085201793722,
"acc_stderr": 0.03191100192835794,
"acc_norm": 0.6547085201793722,
"acc_norm_stderr": 0.03191100192835794
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.7633587786259542,
"acc_stderr": 0.03727673575596915,
"acc_norm": 0.7633587786259542,
"acc_norm_stderr": 0.03727673575596915
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7933884297520661,
"acc_stderr": 0.036959801280988226,
"acc_norm": 0.7933884297520661,
"acc_norm_stderr": 0.036959801280988226
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7314814814814815,
"acc_stderr": 0.042844679680521934,
"acc_norm": 0.7314814814814815,
"acc_norm_stderr": 0.042844679680521934
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7423312883435583,
"acc_stderr": 0.03436150827846917,
"acc_norm": 0.7423312883435583,
"acc_norm_stderr": 0.03436150827846917
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.5089285714285714,
"acc_stderr": 0.04745033255489123,
"acc_norm": 0.5089285714285714,
"acc_norm_stderr": 0.04745033255489123
},
"harness|hendrycksTest-management|5": {
"acc": 0.7961165048543689,
"acc_stderr": 0.039891398595317706,
"acc_norm": 0.7961165048543689,
"acc_norm_stderr": 0.039891398595317706
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8547008547008547,
"acc_stderr": 0.023086635086841407,
"acc_norm": 0.8547008547008547,
"acc_norm_stderr": 0.023086635086841407
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.74,
"acc_stderr": 0.04408440022768078,
"acc_norm": 0.74,
"acc_norm_stderr": 0.04408440022768078
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.80970625798212,
"acc_stderr": 0.014036945850381401,
"acc_norm": 0.80970625798212,
"acc_norm_stderr": 0.014036945850381401
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.6878612716763006,
"acc_stderr": 0.024946792225272314,
"acc_norm": 0.6878612716763006,
"acc_norm_stderr": 0.024946792225272314
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.3474860335195531,
"acc_stderr": 0.01592556406020815,
"acc_norm": 0.3474860335195531,
"acc_norm_stderr": 0.01592556406020815
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.6993464052287581,
"acc_stderr": 0.026256053835718964,
"acc_norm": 0.6993464052287581,
"acc_norm_stderr": 0.026256053835718964
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.6655948553054662,
"acc_stderr": 0.026795422327893937,
"acc_norm": 0.6655948553054662,
"acc_norm_stderr": 0.026795422327893937
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7160493827160493,
"acc_stderr": 0.025089478523765134,
"acc_norm": 0.7160493827160493,
"acc_norm_stderr": 0.025089478523765134
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.43617021276595747,
"acc_stderr": 0.02958345203628407,
"acc_norm": 0.43617021276595747,
"acc_norm_stderr": 0.02958345203628407
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.4530638852672751,
"acc_stderr": 0.012713845972358978,
"acc_norm": 0.4530638852672751,
"acc_norm_stderr": 0.012713845972358978
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6066176470588235,
"acc_stderr": 0.029674288281311155,
"acc_norm": 0.6066176470588235,
"acc_norm_stderr": 0.029674288281311155
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6421568627450981,
"acc_stderr": 0.019393058402355442,
"acc_norm": 0.6421568627450981,
"acc_norm_stderr": 0.019393058402355442
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6363636363636364,
"acc_stderr": 0.04607582090719976,
"acc_norm": 0.6363636363636364,
"acc_norm_stderr": 0.04607582090719976
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7142857142857143,
"acc_stderr": 0.028920583220675606,
"acc_norm": 0.7142857142857143,
"acc_norm_stderr": 0.028920583220675606
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8258706467661692,
"acc_stderr": 0.026814951200421603,
"acc_norm": 0.8258706467661692,
"acc_norm_stderr": 0.026814951200421603
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.79,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.79,
"acc_norm_stderr": 0.040936018074033256
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5120481927710844,
"acc_stderr": 0.03891364495835817,
"acc_norm": 0.5120481927710844,
"acc_norm_stderr": 0.03891364495835817
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8070175438596491,
"acc_stderr": 0.030267457554898458,
"acc_norm": 0.8070175438596491,
"acc_norm_stderr": 0.030267457554898458
},
"harness|truthfulqa:mc|0": {
"mc1": 0.3929008567931457,
"mc1_stderr": 0.017097248285233065,
"mc2": 0.5664808334981362,
"mc2_stderr": 0.015491636686254535
},
"harness|winogrande|5": {
"acc": 0.7758484609313339,
"acc_stderr": 0.011720400740774099
},
"harness|drop|3": {
"em": 0.004718959731543624,
"em_stderr": 0.0007018360183131115,
"f1": 0.09190750838926176,
"f1_stderr": 0.0018302287340192876
},
"harness|gsm8k|5": {
"acc": 0.18953752843062927,
"acc_stderr": 0.010795837931896387
}
}
```
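As a quick check, the per-subject MMLU scores above can be averaged straight from this dictionary; a sketch assuming the JSON has already been parsed into a Python dict named `results`:

```python
import json

# `results` is assumed to hold the dictionary printed above, e.g.:
# results = json.loads(raw_json_string)
mmlu = {
    name: scores["acc"]
    for name, scores in results.items()
    if name.startswith("harness|hendrycksTest-")
}
print(f"{len(mmlu)} MMLU subtasks, mean acc = {sum(mmlu.values()) / len(mmlu):.4f}")
```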
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] | open-llm-leaderboard/details_PulsarAI__SlimOpenOrca-Mistral-7B-v2 | [
"region:us"
]
| 2023-11-12T18:18:53+00:00 | {"pretty_name": "Evaluation run of PulsarAI/SlimOpenOrca-Mistral-7B-v2", "dataset_summary": "Dataset automatically created during the evaluation run of model [PulsarAI/SlimOpenOrca-Mistral-7B-v2](https://huggingface.co/PulsarAI/SlimOpenOrca-Mistral-7B-v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_PulsarAI__SlimOpenOrca-Mistral-7B-v2_public\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-11-12T18:15:51.369317](https://huggingface.co/datasets/open-llm-leaderboard/details_PulsarAI__SlimOpenOrca-Mistral-7B-v2_public/blob/main/results_2023-11-12T18-15-51.369317.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6159393027066592,\n \"acc_stderr\": 0.032593338844127864,\n \"acc_norm\": 0.6242559279403389,\n \"acc_norm_stderr\": 0.03329458303258477,\n \"mc1\": 0.3929008567931457,\n \"mc1_stderr\": 0.017097248285233065,\n \"mc2\": 0.5664808334981362,\n \"mc2_stderr\": 0.015491636686254535,\n \"em\": 0.004718959731543624,\n \"em_stderr\": 0.0007018360183131115,\n \"f1\": 0.09190750838926176,\n \"f1_stderr\": 0.0018302287340192876\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5938566552901023,\n \"acc_stderr\": 0.014351656690097858,\n \"acc_norm\": 0.628839590443686,\n \"acc_norm_stderr\": 0.014117971901142824\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6448914558852819,\n \"acc_stderr\": 0.004775681871529862,\n \"acc_norm\": 0.8340967934674368,\n \"acc_norm_stderr\": 0.003712334763856884\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5851851851851851,\n \"acc_stderr\": 0.04256193767901408,\n \"acc_norm\": 0.5851851851851851,\n \"acc_norm_stderr\": 0.04256193767901408\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6907894736842105,\n \"acc_stderr\": 0.037610708698674805,\n \"acc_norm\": 0.6907894736842105,\n \"acc_norm_stderr\": 0.037610708698674805\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6754716981132075,\n \"acc_stderr\": 0.028815615713432108,\n \"acc_norm\": 0.6754716981132075,\n \"acc_norm_stderr\": 0.028815615713432108\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7361111111111112,\n \"acc_stderr\": 
0.03685651095897532,\n \"acc_norm\": 0.7361111111111112,\n \"acc_norm_stderr\": 0.03685651095897532\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5549132947976878,\n \"acc_stderr\": 0.03789401760283648,\n \"acc_norm\": 0.5549132947976878,\n \"acc_norm_stderr\": 0.03789401760283648\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107223,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107223\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816505,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816505\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5319148936170213,\n \"acc_stderr\": 0.03261936918467381,\n \"acc_norm\": 0.5319148936170213,\n \"acc_norm_stderr\": 0.03261936918467381\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.42105263157894735,\n \"acc_stderr\": 0.046446020912223177,\n \"acc_norm\": 0.42105263157894735,\n \"acc_norm_stderr\": 0.046446020912223177\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5586206896551724,\n \"acc_stderr\": 0.04137931034482758,\n \"acc_norm\": 0.5586206896551724,\n \"acc_norm_stderr\": 0.04137931034482758\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.41534391534391535,\n \"acc_stderr\": 0.02537952491077839,\n \"acc_norm\": 0.41534391534391535,\n \"acc_norm_stderr\": 0.02537952491077839\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4523809523809524,\n \"acc_stderr\": 0.044518079590553275,\n \"acc_norm\": 0.4523809523809524,\n \"acc_norm_stderr\": 0.044518079590553275\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.0446196043338474,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7483870967741936,\n \"acc_stderr\": 0.024685979286239963,\n \"acc_norm\": 0.7483870967741936,\n \"acc_norm_stderr\": 0.024685979286239963\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4630541871921182,\n \"acc_stderr\": 0.035083705204426656,\n \"acc_norm\": 0.4630541871921182,\n \"acc_norm_stderr\": 0.035083705204426656\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7575757575757576,\n \"acc_stderr\": 0.03346409881055953,\n \"acc_norm\": 0.7575757575757576,\n \"acc_norm_stderr\": 0.03346409881055953\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7575757575757576,\n \"acc_stderr\": 0.030532892233932022,\n \"acc_norm\": 0.7575757575757576,\n \"acc_norm_stderr\": 0.030532892233932022\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 
0.8549222797927462,\n \"acc_stderr\": 0.02541634309630645,\n \"acc_norm\": 0.8549222797927462,\n \"acc_norm_stderr\": 0.02541634309630645\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5974358974358974,\n \"acc_stderr\": 0.02486499515976775,\n \"acc_norm\": 0.5974358974358974,\n \"acc_norm_stderr\": 0.02486499515976775\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.35555555555555557,\n \"acc_stderr\": 0.029185714949857413,\n \"acc_norm\": 0.35555555555555557,\n \"acc_norm_stderr\": 0.029185714949857413\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6386554621848739,\n \"acc_stderr\": 0.031204691225150016,\n \"acc_norm\": 0.6386554621848739,\n \"acc_norm_stderr\": 0.031204691225150016\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.271523178807947,\n \"acc_stderr\": 0.03631329803969653,\n \"acc_norm\": 0.271523178807947,\n \"acc_norm_stderr\": 0.03631329803969653\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8311926605504587,\n \"acc_stderr\": 0.01606005626853035,\n \"acc_norm\": 0.8311926605504587,\n \"acc_norm_stderr\": 0.01606005626853035\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.49074074074074076,\n \"acc_stderr\": 0.034093869469927006,\n \"acc_norm\": 0.49074074074074076,\n \"acc_norm_stderr\": 0.034093869469927006\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8137254901960784,\n \"acc_stderr\": 0.027325470966716312,\n \"acc_norm\": 0.8137254901960784,\n \"acc_norm_stderr\": 0.027325470966716312\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7890295358649789,\n \"acc_stderr\": 0.02655837250266192,\n \"acc_norm\": 0.7890295358649789,\n \"acc_norm_stderr\": 0.02655837250266192\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6547085201793722,\n \"acc_stderr\": 0.03191100192835794,\n \"acc_norm\": 0.6547085201793722,\n \"acc_norm_stderr\": 0.03191100192835794\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7633587786259542,\n \"acc_stderr\": 0.03727673575596915,\n \"acc_norm\": 0.7633587786259542,\n \"acc_norm_stderr\": 0.03727673575596915\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7933884297520661,\n \"acc_stderr\": 0.036959801280988226,\n \"acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.036959801280988226\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7314814814814815,\n \"acc_stderr\": 0.042844679680521934,\n \"acc_norm\": 0.7314814814814815,\n \"acc_norm_stderr\": 0.042844679680521934\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7423312883435583,\n \"acc_stderr\": 0.03436150827846917,\n \"acc_norm\": 0.7423312883435583,\n \"acc_norm_stderr\": 0.03436150827846917\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5089285714285714,\n \"acc_stderr\": 0.04745033255489123,\n \"acc_norm\": 0.5089285714285714,\n \"acc_norm_stderr\": 0.04745033255489123\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7961165048543689,\n \"acc_stderr\": 0.039891398595317706,\n \"acc_norm\": 0.7961165048543689,\n \"acc_norm_stderr\": 0.039891398595317706\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8547008547008547,\n \"acc_stderr\": 0.023086635086841407,\n \"acc_norm\": 0.8547008547008547,\n \"acc_norm_stderr\": 0.023086635086841407\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 
0.04408440022768078,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768078\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.80970625798212,\n \"acc_stderr\": 0.014036945850381401,\n \"acc_norm\": 0.80970625798212,\n \"acc_norm_stderr\": 0.014036945850381401\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6878612716763006,\n \"acc_stderr\": 0.024946792225272314,\n \"acc_norm\": 0.6878612716763006,\n \"acc_norm_stderr\": 0.024946792225272314\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3474860335195531,\n \"acc_stderr\": 0.01592556406020815,\n \"acc_norm\": 0.3474860335195531,\n \"acc_norm_stderr\": 0.01592556406020815\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6993464052287581,\n \"acc_stderr\": 0.026256053835718964,\n \"acc_norm\": 0.6993464052287581,\n \"acc_norm_stderr\": 0.026256053835718964\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6655948553054662,\n \"acc_stderr\": 0.026795422327893937,\n \"acc_norm\": 0.6655948553054662,\n \"acc_norm_stderr\": 0.026795422327893937\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7160493827160493,\n \"acc_stderr\": 0.025089478523765134,\n \"acc_norm\": 0.7160493827160493,\n \"acc_norm_stderr\": 0.025089478523765134\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.43617021276595747,\n \"acc_stderr\": 0.02958345203628407,\n \"acc_norm\": 0.43617021276595747,\n \"acc_norm_stderr\": 0.02958345203628407\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4530638852672751,\n \"acc_stderr\": 0.012713845972358978,\n \"acc_norm\": 0.4530638852672751,\n \"acc_norm_stderr\": 0.012713845972358978\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6066176470588235,\n \"acc_stderr\": 0.029674288281311155,\n \"acc_norm\": 0.6066176470588235,\n \"acc_norm_stderr\": 0.029674288281311155\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6421568627450981,\n \"acc_stderr\": 0.019393058402355442,\n \"acc_norm\": 0.6421568627450981,\n \"acc_norm_stderr\": 0.019393058402355442\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6363636363636364,\n \"acc_stderr\": 0.04607582090719976,\n \"acc_norm\": 0.6363636363636364,\n \"acc_norm_stderr\": 0.04607582090719976\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7142857142857143,\n \"acc_stderr\": 0.028920583220675606,\n \"acc_norm\": 0.7142857142857143,\n \"acc_norm_stderr\": 0.028920583220675606\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8258706467661692,\n \"acc_stderr\": 0.026814951200421603,\n \"acc_norm\": 0.8258706467661692,\n \"acc_norm_stderr\": 0.026814951200421603\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.79,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5120481927710844,\n \"acc_stderr\": 0.03891364495835817,\n \"acc_norm\": 0.5120481927710844,\n \"acc_norm_stderr\": 0.03891364495835817\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8070175438596491,\n \"acc_stderr\": 0.030267457554898458,\n \"acc_norm\": 0.8070175438596491,\n \"acc_norm_stderr\": 0.030267457554898458\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3929008567931457,\n \"mc1_stderr\": 0.017097248285233065,\n \"mc2\": 0.5664808334981362,\n \"mc2_stderr\": 0.015491636686254535\n },\n \"harness|winogrande|5\": {\n \"acc\": 
0.7758484609313339,\n \"acc_stderr\": 0.011720400740774099\n },\n \"harness|drop|3\": {\n \"em\": 0.004718959731543624,\n \"em_stderr\": 0.0007018360183131115,\n \"f1\": 0.09190750838926176,\n \"f1_stderr\": 0.0018302287340192876\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.18953752843062927,\n \"acc_stderr\": 0.010795837931896387\n }\n}\n```", "repo_url": "https://huggingface.co/PulsarAI/SlimOpenOrca-Mistral-7B-v2", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|arc:challenge|25_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|drop|3_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|gsm8k|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hellaswag|10_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T18-15-51.369317.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-management|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-virology|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T18-15-51.369317.parquet", 
"**/details_harness|hendrycksTest-management|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-virology|5_2023-11-12T18-15-51.369317.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": 
["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": 
["**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-management|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|hendrycksTest-virology|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": 
["**/details_harness|hendrycksTest-world_religions|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|truthfulqa:mc|0_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["**/details_harness|winogrande|5_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-11-12T18-15-51.369317.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_11_12T18_15_51.369317", "path": ["results_2023-11-12T18-15-51.369317.parquet"]}, {"split": "latest", "path": ["results_2023-11-12T18-15-51.369317.parquet"]}]}]} | 2023-11-12T18:19:53+00:00 | []
| []
| TAGS
#region-us
|
# Dataset Card for Evaluation run of PulsarAI/SlimOpenOrca-Mistral-7B-v2
## Dataset Description
- Homepage:
- Repository: URL
- Paper:
- Leaderboard: URL
- Point of Contact: clementine@URL
### Dataset Summary
Dataset automatically created during the evaluation run of model PulsarAI/SlimOpenOrca-Mistral-7B-v2 on the Open LLM Leaderboard.
The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).
To load the details from a run, you can for instance do the following:
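A minimal sketch, assuming the leaderboard's usual `details_<org>__<model>` repository naming for this run (the exact repo id and the set of available configurations may differ):

```python
from datasets import load_dataset

# The "train" split always points at the latest results, per the description above.
data = load_dataset(
    "open-llm-leaderboard/details_PulsarAI__SlimOpenOrca-Mistral-7B-v2",
    "harness_winogrande_5",
    split="train",
)
```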
## Latest results
These are the latest results from run 2023-11-12T18:15:51.369317 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and in the "latest" split for each eval):
### Supported Tasks and Leaderboards
### Languages
## Dataset Structure
### Data Instances
### Data Fields
### Data Splits
## Dataset Creation
### Curation Rationale
### Source Data
#### Initial Data Collection and Normalization
#### Who are the source language producers?
### Annotations
#### Annotation process
#### Who are the annotators?
### Personal and Sensitive Information
## Considerations for Using the Data
### Social Impact of Dataset
### Discussion of Biases
### Other Known Limitations
## Additional Information
### Dataset Curators
### Licensing Information
### Contributions
| [
"# Dataset Card for Evaluation run of PulsarAI/SlimOpenOrca-Mistral-7B-v2",
"## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL",
"### Dataset Summary\n\nDataset automatically created during the evaluation run of model PulsarAI/SlimOpenOrca-Mistral-7B-v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:",
"## Latest results\n\nThese are the latest results from run 2023-11-12T18:15:51.369317(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):",
"### Supported Tasks and Leaderboards",
"### Languages",
"## Dataset Structure",
"### Data Instances",
"### Data Fields",
"### Data Splits",
"## Dataset Creation",
"### Curation Rationale",
"### Source Data",
"#### Initial Data Collection and Normalization",
"#### Who are the source language producers?",
"### Annotations",
"#### Annotation process",
"#### Who are the annotators?",
"### Personal and Sensitive Information",
"## Considerations for Using the Data",
"### Social Impact of Dataset",
"### Discussion of Biases",
"### Other Known Limitations",
"## Additional Information",
"### Dataset Curators",
"### Licensing Information",
"### Contributions"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for Evaluation run of PulsarAI/SlimOpenOrca-Mistral-7B-v2",
"## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL",
"### Dataset Summary\n\nDataset automatically created during the evaluation run of model PulsarAI/SlimOpenOrca-Mistral-7B-v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:",
"## Latest results\n\nThese are the latest results from run 2023-11-12T18:15:51.369317(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):",
"### Supported Tasks and Leaderboards",
"### Languages",
"## Dataset Structure",
"### Data Instances",
"### Data Fields",
"### Data Splits",
"## Dataset Creation",
"### Curation Rationale",
"### Source Data",
"#### Initial Data Collection and Normalization",
"#### Who are the source language producers?",
"### Annotations",
"#### Annotation process",
"#### Who are the annotators?",
"### Personal and Sensitive Information",
"## Considerations for Using the Data",
"### Social Impact of Dataset",
"### Discussion of Biases",
"### Other Known Limitations",
"## Additional Information",
"### Dataset Curators",
"### Licensing Information",
"### Contributions"
]
| [
6,
26,
31,
175,
67,
10,
4,
6,
6,
5,
5,
5,
7,
4,
10,
10,
5,
5,
9,
8,
8,
7,
8,
7,
5,
6,
6,
5
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of PulsarAI/SlimOpenOrca-Mistral-7B-v2## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model PulsarAI/SlimOpenOrca-Mistral-7B-v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-11-12T18:15:51.369317(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions"
]
|
8a3bdc5c8abb6f85ac745585f2943570ef079ffd | # Dataset Card for "zalo"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | hoanganhknk/zalo | [
"region:us"
]
| 2023-11-12T18:36:31+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "temp", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 71711.0, "num_examples": 1}], "download_size": 72626, "dataset_size": 71711.0}} | 2023-11-12T19:13:41+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "zalo"
More Information needed | [
"# Dataset Card for \"zalo\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"zalo\"\n\nMore Information needed"
]
| [
6,
12
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"zalo\"\n\nMore Information needed"
]
|
557331d04909372c78afc5f134f2e90a30bc5dc0 | # Dataset Card for "chatbotSentences-mini"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | KardelRuveyda/chatbotSentences-mini | [
"region:us"
]
| 2023-11-12T18:46:38+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}]}], "dataset_info": {"features": [{"name": "chatbottrainsentence", "dtype": "string"}, {"name": "train_sentences_length", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 139373705.28782204, "num_examples": 362520}, {"name": "validation", "num_bytes": 15486351.712177973, "num_examples": 40281}], "download_size": 96790843, "dataset_size": 154860057.0}} | 2023-11-22T05:17:20+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "chatbotSentences-mini"
More Information needed | [
"# Dataset Card for \"chatbotSentences-mini\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"chatbotSentences-mini\"\n\nMore Information needed"
]
| [
6,
17
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"chatbotSentences-mini\"\n\nMore Information needed"
]
|
2bbe5c183e89cdbe28f597ffcc7ba3a375930017 |
## IDMGSP Danish Translated
Danish translated version of the original [IDMGSP](https://huggingface.co/datasets/tum-nlp/IDMGSP) dataset. This Danish version contains all `abstracts` from the `classifier_input` subset, translated using an [`Nllb-200-Distilled-600M`](https://huggingface.co/facebook/nllb-200-distilled-600M) model (see the translation sketch below). Made as part of the [DM-AI 2023](https://dmiai.dk/) competition. | ernlavr/IDMGSP-danish | [
"license:apache-2.0",
"region:us"
]
| 2023-11-12T18:52:42+00:00 | {"license": "apache-2.0", "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "label", "dtype": "int64"}, {"name": "abstract", "dtype": "string"}, {"name": "translation", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 24634068, "num_examples": 16000}, {"name": "test", "num_bytes": 12269388, "num_examples": 8000}], "download_size": 19382041, "dataset_size": 36903456}} | 2023-11-18T13:21:17+00:00 | []
| []
| TAGS
#license-apache-2.0 #region-us
|
## IDMGSP Danish Translated
Danish translated version of the original IDMGSP dataset. This Danish version contains all 'abstracts' from the 'classifier_input' subset, translated using an 'Nllb-200-Distilled-600M' model. Made as part of the DM-AI 2023 competition. | [
"## IDMGSP Danish Translated\nDanish translated version of the original IDMGSP dataset. This Danish version contains all 'abstracts' from the 'classifier_input' subset. Translated using an 'Nllb-200-Distilled-600M'. Made as part of the DM-AI 2023 competition"
]
| [
"TAGS\n#license-apache-2.0 #region-us \n",
"## IDMGSP Danish Translated\nDanish translated version of the original IDMGSP dataset. This Danish version contains all 'abstracts' from the 'classifier_input' subset. Translated using an 'Nllb-200-Distilled-600M'. Made as part of the DM-AI 2023 competition"
]
| [
14,
75
]
| [
"passage: TAGS\n#license-apache-2.0 #region-us \n## IDMGSP Danish Translated\nDanish translated version of the original IDMGSP dataset. This Danish version contains all 'abstracts' from the 'classifier_input' subset. Translated using an 'Nllb-200-Distilled-600M'. Made as part of the DM-AI 2023 competition"
]
|
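As a rough illustration of the NLLB-based translation setup described in the IDMGSP-danish card above (a minimal sketch; the batching, truncation, and generation settings actually used to build the dataset are not documented here, and the example input is hypothetical):

```python
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

# NLLB-200 uses FLORES-200 language codes: English is "eng_Latn", Danish is "dan_Latn".
model_name = "facebook/nllb-200-distilled-600M"
tokenizer = AutoTokenizer.from_pretrained(model_name, src_lang="eng_Latn")
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)

abstract = "We study the detection of machine-generated scientific papers."  # hypothetical example
inputs = tokenizer(abstract, return_tensors="pt", truncation=True)
outputs = model.generate(
    **inputs,
    forced_bos_token_id=tokenizer.convert_tokens_to_ids("dan_Latn"),  # force Danish output
    max_length=512,
)
print(tokenizer.batch_decode(outputs, skip_special_tokens=True)[0])
```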
8e9d8826a957a50029ff234283bf405ed5b0f87d | # Dataset Card for "ImageNetSubset_16130352366404471562604770850950"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | DeepLearner101/ImageNetSubset_16130352366404471562604770850950 | [
"region:us"
]
| 2023-11-12T18:58:29+00:00 | {"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 65973972.0, "num_examples": 1760}, {"name": "validation", "num_bytes": 20185132.0, "num_examples": 550}, {"name": "test", "num_bytes": 15117832.0, "num_examples": 352}], "download_size": 101216900, "dataset_size": 101276936.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}, {"split": "test", "path": "data/test-*"}]}]} | 2023-11-13T12:42:48+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "ImageNetSubset_16130352366404471562604770850950"
More Information needed | [
"# Dataset Card for \"ImageNetSubset_16130352366404471562604770850950\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"ImageNetSubset_16130352366404471562604770850950\"\n\nMore Information needed"
]
| [
6,
28
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"ImageNetSubset_16130352366404471562604770850950\"\n\nMore Information needed"
]
|
1db7060e4ba5934c25025f107cbf3017e13ef1f6 |
# TimeQA
Check out the original [GitHub repo](https://github.com/wenhuchen/Time-Sensitive-QA/tree/main) to learn more about the dataset.
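A minimal loading sketch (assuming this Hub copy exposes a default configuration; the actual config and split names may differ):

```python
from datasets import load_dataset

# Repo id taken from this card; see the linked GitHub repo for the data format.
ds = load_dataset("hugosousa/TimeQA")
print(ds)
```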
| hugosousa/TimeQA | [
"task_categories:question-answering",
"task_ids:closed-domain-qa",
"language_creators:crowdsourced",
"language_creators:machine-generated",
"multilinguality:monolingual",
"size_categories:10K<n<100K",
"language:en",
"license:bsd-3-clause-clear",
"region:us"
]
| 2023-11-12T19:31:09+00:00 | {"annotations_creators": [], "language_creators": ["crowdsourced", "machine-generated"], "language": ["en"], "license": ["bsd-3-clause-clear"], "multilinguality": ["monolingual"], "size_categories": ["10K<n<100K"], "source_datasets": [], "task_categories": ["question-answering"], "task_ids": ["closed-domain-qa"], "pretty_name": "TimeQA", "tags": []} | 2023-11-28T19:02:42+00:00 | []
| [
"en"
]
| TAGS
#task_categories-question-answering #task_ids-closed-domain-qa #language_creators-crowdsourced #language_creators-machine-generated #multilinguality-monolingual #size_categories-10K<n<100K #language-English #license-bsd-3-clause-clear #region-us
|
# TimeQA
Check out the original GitHub repo to learn more about the dataset.
| [
"# TimeQA\n\nCheck out the original GitHub repo to learn more about the dataset."
]
| [
"TAGS\n#task_categories-question-answering #task_ids-closed-domain-qa #language_creators-crowdsourced #language_creators-machine-generated #multilinguality-monolingual #size_categories-10K<n<100K #language-English #license-bsd-3-clause-clear #region-us \n",
"# TimeQA\n\nCheck out the original GitHub repo to learn more about the dataset."
]
| [
89,
19
]
| [
"passage: TAGS\n#task_categories-question-answering #task_ids-closed-domain-qa #language_creators-crowdsourced #language_creators-machine-generated #multilinguality-monolingual #size_categories-10K<n<100K #language-English #license-bsd-3-clause-clear #region-us \n# TimeQA\n\nCheck out the original GitHub repo to learn more about the dataset."
]
|
a564fef9c301ef3675649f348b27e1b7fd16b076 | # Dataset Card for "hboi_test"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | leonvanbokhorst/hboi_test | [
"region:us"
]
| 2023-11-12T19:32:14+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "output", "dtype": "string"}, {"name": "instruction", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 151364.55566905005, "num_examples": 900}, {"name": "test", "num_bytes": 13286.44433094995, "num_examples": 79}], "download_size": 65869, "dataset_size": 164651.0}} | 2023-11-12T19:32:20+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "hboi_test"
More Information needed | [
"# Dataset Card for \"hboi_test\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"hboi_test\"\n\nMore Information needed"
]
| [
6,
15
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"hboi_test\"\n\nMore Information needed"
]
|
4d9c259212e04f20d1e02c4eef92002d8dbbe0ae | # Dataset Card for "allison-katz"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | Chunt0/allison-katz | [
"region:us"
]
| 2023-11-12T19:41:23+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 54777344.0, "num_examples": 167}], "download_size": 54777540, "dataset_size": 54777344.0}} | 2023-11-13T01:01:05+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "allison-katz"
More Information needed | [
"# Dataset Card for \"allison-katz\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"allison-katz\"\n\nMore Information needed"
]
| [
6,
15
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"allison-katz\"\n\nMore Information needed"
]
|
2ce7320bf147880c08639885af741b5b60eb7a57 |
# Dataset Card for Evaluation run of Expert68/llama2_13b_instructed_version2
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/Expert68/llama2_13b_instructed_version2
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]
### Dataset Summary
Dataset automatically created during the evaluation run of model [Expert68/llama2_13b_instructed_version2](https://huggingface.co/Expert68/llama2_13b_instructed_version2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_Expert68__llama2_13b_instructed_version2_public",
"harness_winogrande_5",
split="train")
```
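Continuing from the snippet above, the aggregated metrics can be pulled from the "results" configuration (a sketch following the same pattern; the "latest" split mirrors the most recent run):

```python
results = load_dataset(
    "open-llm-leaderboard/details_Expert68__llama2_13b_instructed_version2_public",
    "results",
    split="latest",  # or the timestamped split of a specific run
)
```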
## Latest results
These are the [latest results from run 2023-11-12T19:44:39.658427](https://huggingface.co/datasets/open-llm-leaderboard/details_Expert68__llama2_13b_instructed_version2_public/blob/main/results_2023-11-12T19-44-39.658427.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and in the "latest" split for each eval):
```python
{
"all": {
"acc": 0.5535385938067054,
"acc_stderr": 0.03382379046360409,
"acc_norm": 0.5616374813808622,
"acc_norm_stderr": 0.034597480068222046,
"mc1": 0.31456548347613217,
"mc1_stderr": 0.01625524199317918,
"mc2": 0.46118545589659976,
"mc2_stderr": 0.015483508114692393,
"em": 0.007340604026845637,
"em_stderr": 0.0008741896875345934,
"f1": 0.07567323825503336,
"f1_stderr": 0.0016747744191590948
},
"harness|arc:challenge|25": {
"acc": 0.5631399317406144,
"acc_stderr": 0.014494421584256519,
"acc_norm": 0.6006825938566553,
"acc_norm_stderr": 0.014312094557946705
},
"harness|hellaswag|10": {
"acc": 0.6412069308902609,
"acc_stderr": 0.004786660691181909,
"acc_norm": 0.8404700258912567,
"acc_norm_stderr": 0.003654212329516619
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252605,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252605
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.4740740740740741,
"acc_stderr": 0.04313531696750574,
"acc_norm": 0.4740740740740741,
"acc_norm_stderr": 0.04313531696750574
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.5460526315789473,
"acc_stderr": 0.04051646342874142,
"acc_norm": 0.5460526315789473,
"acc_norm_stderr": 0.04051646342874142
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.54,
"acc_stderr": 0.05009082659620332,
"acc_norm": 0.54,
"acc_norm_stderr": 0.05009082659620332
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.5924528301886792,
"acc_stderr": 0.030242233800854494,
"acc_norm": 0.5924528301886792,
"acc_norm_stderr": 0.030242233800854494
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.625,
"acc_stderr": 0.04048439222695598,
"acc_norm": 0.625,
"acc_norm_stderr": 0.04048439222695598
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.38,
"acc_stderr": 0.04878317312145633,
"acc_norm": 0.38,
"acc_norm_stderr": 0.04878317312145633
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.44,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.44,
"acc_norm_stderr": 0.04988876515698589
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.38,
"acc_stderr": 0.04878317312145632,
"acc_norm": 0.38,
"acc_norm_stderr": 0.04878317312145632
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.5144508670520231,
"acc_stderr": 0.03810871630454764,
"acc_norm": 0.5144508670520231,
"acc_norm_stderr": 0.03810871630454764
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.24509803921568626,
"acc_stderr": 0.04280105837364397,
"acc_norm": 0.24509803921568626,
"acc_norm_stderr": 0.04280105837364397
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.62,
"acc_stderr": 0.048783173121456316,
"acc_norm": 0.62,
"acc_norm_stderr": 0.048783173121456316
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.451063829787234,
"acc_stderr": 0.032529096196131965,
"acc_norm": 0.451063829787234,
"acc_norm_stderr": 0.032529096196131965
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.044346007015849245,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.044346007015849245
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.46206896551724136,
"acc_stderr": 0.041546596717075474,
"acc_norm": 0.46206896551724136,
"acc_norm_stderr": 0.041546596717075474
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.32275132275132273,
"acc_stderr": 0.024078943243597016,
"acc_norm": 0.32275132275132273,
"acc_norm_stderr": 0.024078943243597016
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.35714285714285715,
"acc_stderr": 0.042857142857142816,
"acc_norm": 0.35714285714285715,
"acc_norm_stderr": 0.042857142857142816
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.39,
"acc_stderr": 0.04902071300001975,
"acc_norm": 0.39,
"acc_norm_stderr": 0.04902071300001975
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.6483870967741936,
"acc_stderr": 0.02716253782694846,
"acc_norm": 0.6483870967741936,
"acc_norm_stderr": 0.02716253782694846
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.43842364532019706,
"acc_stderr": 0.03491207857486518,
"acc_norm": 0.43842364532019706,
"acc_norm_stderr": 0.03491207857486518
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.53,
"acc_stderr": 0.05016135580465919,
"acc_norm": 0.53,
"acc_norm_stderr": 0.05016135580465919
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.696969696969697,
"acc_stderr": 0.03588624800091707,
"acc_norm": 0.696969696969697,
"acc_norm_stderr": 0.03588624800091707
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.6717171717171717,
"acc_stderr": 0.03345678422756776,
"acc_norm": 0.6717171717171717,
"acc_norm_stderr": 0.03345678422756776
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8238341968911918,
"acc_stderr": 0.027493504244548057,
"acc_norm": 0.8238341968911918,
"acc_norm_stderr": 0.027493504244548057
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.5153846153846153,
"acc_stderr": 0.025339003010106515,
"acc_norm": 0.5153846153846153,
"acc_norm_stderr": 0.025339003010106515
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.26666666666666666,
"acc_stderr": 0.026962424325073835,
"acc_norm": 0.26666666666666666,
"acc_norm_stderr": 0.026962424325073835
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.592436974789916,
"acc_stderr": 0.031918633744784645,
"acc_norm": 0.592436974789916,
"acc_norm_stderr": 0.031918633744784645
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.3708609271523179,
"acc_stderr": 0.03943966699183629,
"acc_norm": 0.3708609271523179,
"acc_norm_stderr": 0.03943966699183629
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.728440366972477,
"acc_stderr": 0.019069098363191428,
"acc_norm": 0.728440366972477,
"acc_norm_stderr": 0.019069098363191428
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.42592592592592593,
"acc_stderr": 0.03372343271653064,
"acc_norm": 0.42592592592592593,
"acc_norm_stderr": 0.03372343271653064
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.7745098039215687,
"acc_stderr": 0.02933116229425174,
"acc_norm": 0.7745098039215687,
"acc_norm_stderr": 0.02933116229425174
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.7383966244725738,
"acc_stderr": 0.028609516716994934,
"acc_norm": 0.7383966244725738,
"acc_norm_stderr": 0.028609516716994934
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6771300448430493,
"acc_stderr": 0.031381476375754995,
"acc_norm": 0.6771300448430493,
"acc_norm_stderr": 0.031381476375754995
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.5801526717557252,
"acc_stderr": 0.04328577215262971,
"acc_norm": 0.5801526717557252,
"acc_norm_stderr": 0.04328577215262971
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7107438016528925,
"acc_stderr": 0.041391127276354626,
"acc_norm": 0.7107438016528925,
"acc_norm_stderr": 0.041391127276354626
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.6944444444444444,
"acc_stderr": 0.044531975073749834,
"acc_norm": 0.6944444444444444,
"acc_norm_stderr": 0.044531975073749834
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.6687116564417178,
"acc_stderr": 0.03697983910025588,
"acc_norm": 0.6687116564417178,
"acc_norm_stderr": 0.03697983910025588
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.39285714285714285,
"acc_stderr": 0.04635550135609976,
"acc_norm": 0.39285714285714285,
"acc_norm_stderr": 0.04635550135609976
},
"harness|hendrycksTest-management|5": {
"acc": 0.7184466019417476,
"acc_stderr": 0.04453254836326467,
"acc_norm": 0.7184466019417476,
"acc_norm_stderr": 0.04453254836326467
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8205128205128205,
"acc_stderr": 0.025140935950335442,
"acc_norm": 0.8205128205128205,
"acc_norm_stderr": 0.025140935950335442
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.57,
"acc_stderr": 0.049756985195624284,
"acc_norm": 0.57,
"acc_norm_stderr": 0.049756985195624284
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.7637292464878672,
"acc_stderr": 0.01519047371703751,
"acc_norm": 0.7637292464878672,
"acc_norm_stderr": 0.01519047371703751
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.6242774566473989,
"acc_stderr": 0.02607431485165708,
"acc_norm": 0.6242774566473989,
"acc_norm_stderr": 0.02607431485165708
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.4011173184357542,
"acc_stderr": 0.01639222189940707,
"acc_norm": 0.4011173184357542,
"acc_norm_stderr": 0.01639222189940707
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.5915032679738562,
"acc_stderr": 0.028146405993096358,
"acc_norm": 0.5915032679738562,
"acc_norm_stderr": 0.028146405993096358
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.6527331189710611,
"acc_stderr": 0.027040745502307336,
"acc_norm": 0.6527331189710611,
"acc_norm_stderr": 0.027040745502307336
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.6604938271604939,
"acc_stderr": 0.026348564412011624,
"acc_norm": 0.6604938271604939,
"acc_norm_stderr": 0.026348564412011624
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.43617021276595747,
"acc_stderr": 0.02958345203628407,
"acc_norm": 0.43617021276595747,
"acc_norm_stderr": 0.02958345203628407
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.4380704041720991,
"acc_stderr": 0.01267190278256765,
"acc_norm": 0.4380704041720991,
"acc_norm_stderr": 0.01267190278256765
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.5330882352941176,
"acc_stderr": 0.03030625772246831,
"acc_norm": 0.5330882352941176,
"acc_norm_stderr": 0.03030625772246831
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.5751633986928104,
"acc_stderr": 0.01999797303545833,
"acc_norm": 0.5751633986928104,
"acc_norm_stderr": 0.01999797303545833
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6363636363636364,
"acc_stderr": 0.04607582090719976,
"acc_norm": 0.6363636363636364,
"acc_norm_stderr": 0.04607582090719976
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.6,
"acc_stderr": 0.03136250240935893,
"acc_norm": 0.6,
"acc_norm_stderr": 0.03136250240935893
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.7164179104477612,
"acc_stderr": 0.031871875379197966,
"acc_norm": 0.7164179104477612,
"acc_norm_stderr": 0.031871875379197966
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.79,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.79,
"acc_norm_stderr": 0.040936018074033256
},
"harness|hendrycksTest-virology|5": {
"acc": 0.45180722891566266,
"acc_stderr": 0.03874371556587953,
"acc_norm": 0.45180722891566266,
"acc_norm_stderr": 0.03874371556587953
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.7660818713450293,
"acc_stderr": 0.03246721765117826,
"acc_norm": 0.7660818713450293,
"acc_norm_stderr": 0.03246721765117826
},
"harness|truthfulqa:mc|0": {
"mc1": 0.31456548347613217,
"mc1_stderr": 0.01625524199317918,
"mc2": 0.46118545589659976,
"mc2_stderr": 0.015483508114692393
},
"harness|winogrande|5": {
"acc": 0.7561168113654302,
"acc_stderr": 0.012068923278908194
},
"harness|drop|3": {
"em": 0.007340604026845637,
"em_stderr": 0.0008741896875345934,
"f1": 0.07567323825503336,
"f1_stderr": 0.0016747744191590948
},
"harness|gsm8k|5": {
"acc": 0.10993176648976498,
"acc_stderr": 0.008616195587865397
}
}
```
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] | open-llm-leaderboard/details_Expert68__llama2_13b_instructed_version2 | [
"region:us"
]
| 2023-11-12T19:47:45+00:00 | {"pretty_name": "Evaluation run of Expert68/llama2_13b_instructed_version2", "dataset_summary": "Dataset automatically created during the evaluation run of model [Expert68/llama2_13b_instructed_version2](https://huggingface.co/Expert68/llama2_13b_instructed_version2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Expert68__llama2_13b_instructed_version2_public\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-11-12T19:44:39.658427](https://huggingface.co/datasets/open-llm-leaderboard/details_Expert68__llama2_13b_instructed_version2_public/blob/main/results_2023-11-12T19-44-39.658427.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5535385938067054,\n \"acc_stderr\": 0.03382379046360409,\n \"acc_norm\": 0.5616374813808622,\n \"acc_norm_stderr\": 0.034597480068222046,\n \"mc1\": 0.31456548347613217,\n \"mc1_stderr\": 0.01625524199317918,\n \"mc2\": 0.46118545589659976,\n \"mc2_stderr\": 0.015483508114692393,\n \"em\": 0.007340604026845637,\n \"em_stderr\": 0.0008741896875345934,\n \"f1\": 0.07567323825503336,\n \"f1_stderr\": 0.0016747744191590948\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5631399317406144,\n \"acc_stderr\": 0.014494421584256519,\n \"acc_norm\": 0.6006825938566553,\n \"acc_norm_stderr\": 0.014312094557946705\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6412069308902609,\n \"acc_stderr\": 0.004786660691181909,\n \"acc_norm\": 0.8404700258912567,\n \"acc_norm_stderr\": 0.003654212329516619\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4740740740740741,\n \"acc_stderr\": 0.04313531696750574,\n \"acc_norm\": 0.4740740740740741,\n \"acc_norm_stderr\": 0.04313531696750574\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.5460526315789473,\n \"acc_stderr\": 0.04051646342874142,\n \"acc_norm\": 0.5460526315789473,\n \"acc_norm_stderr\": 0.04051646342874142\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5924528301886792,\n \"acc_stderr\": 0.030242233800854494,\n \"acc_norm\": 0.5924528301886792,\n \"acc_norm_stderr\": 0.030242233800854494\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.625,\n 
\"acc_stderr\": 0.04048439222695598,\n \"acc_norm\": 0.625,\n \"acc_norm_stderr\": 0.04048439222695598\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145633,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145633\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145632,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145632\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5144508670520231,\n \"acc_stderr\": 0.03810871630454764,\n \"acc_norm\": 0.5144508670520231,\n \"acc_norm_stderr\": 0.03810871630454764\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.24509803921568626,\n \"acc_stderr\": 0.04280105837364397,\n \"acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.04280105837364397\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.451063829787234,\n \"acc_stderr\": 0.032529096196131965,\n \"acc_norm\": 0.451063829787234,\n \"acc_norm_stderr\": 0.032529096196131965\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.044346007015849245,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.044346007015849245\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.46206896551724136,\n \"acc_stderr\": 0.041546596717075474,\n \"acc_norm\": 0.46206896551724136,\n \"acc_norm_stderr\": 0.041546596717075474\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.32275132275132273,\n \"acc_stderr\": 0.024078943243597016,\n \"acc_norm\": 0.32275132275132273,\n \"acc_norm_stderr\": 0.024078943243597016\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.35714285714285715,\n \"acc_stderr\": 0.042857142857142816,\n \"acc_norm\": 0.35714285714285715,\n \"acc_norm_stderr\": 0.042857142857142816\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6483870967741936,\n \"acc_stderr\": 0.02716253782694846,\n \"acc_norm\": 0.6483870967741936,\n \"acc_norm_stderr\": 0.02716253782694846\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.43842364532019706,\n \"acc_stderr\": 0.03491207857486518,\n \"acc_norm\": 0.43842364532019706,\n \"acc_norm_stderr\": 0.03491207857486518\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.696969696969697,\n \"acc_stderr\": 0.03588624800091707,\n \"acc_norm\": 0.696969696969697,\n \"acc_norm_stderr\": 0.03588624800091707\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.6717171717171717,\n \"acc_stderr\": 0.03345678422756776,\n \"acc_norm\": 0.6717171717171717,\n \"acc_norm_stderr\": 0.03345678422756776\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n 
\"acc\": 0.8238341968911918,\n \"acc_stderr\": 0.027493504244548057,\n \"acc_norm\": 0.8238341968911918,\n \"acc_norm_stderr\": 0.027493504244548057\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5153846153846153,\n \"acc_stderr\": 0.025339003010106515,\n \"acc_norm\": 0.5153846153846153,\n \"acc_norm_stderr\": 0.025339003010106515\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.26666666666666666,\n \"acc_stderr\": 0.026962424325073835,\n \"acc_norm\": 0.26666666666666666,\n \"acc_norm_stderr\": 0.026962424325073835\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.592436974789916,\n \"acc_stderr\": 0.031918633744784645,\n \"acc_norm\": 0.592436974789916,\n \"acc_norm_stderr\": 0.031918633744784645\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3708609271523179,\n \"acc_stderr\": 0.03943966699183629,\n \"acc_norm\": 0.3708609271523179,\n \"acc_norm_stderr\": 0.03943966699183629\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.728440366972477,\n \"acc_stderr\": 0.019069098363191428,\n \"acc_norm\": 0.728440366972477,\n \"acc_norm_stderr\": 0.019069098363191428\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.42592592592592593,\n \"acc_stderr\": 0.03372343271653064,\n \"acc_norm\": 0.42592592592592593,\n \"acc_norm_stderr\": 0.03372343271653064\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7745098039215687,\n \"acc_stderr\": 0.02933116229425174,\n \"acc_norm\": 0.7745098039215687,\n \"acc_norm_stderr\": 0.02933116229425174\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7383966244725738,\n \"acc_stderr\": 0.028609516716994934,\n \"acc_norm\": 0.7383966244725738,\n \"acc_norm_stderr\": 0.028609516716994934\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6771300448430493,\n \"acc_stderr\": 0.031381476375754995,\n \"acc_norm\": 0.6771300448430493,\n \"acc_norm_stderr\": 0.031381476375754995\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.5801526717557252,\n \"acc_stderr\": 0.04328577215262971,\n \"acc_norm\": 0.5801526717557252,\n \"acc_norm_stderr\": 0.04328577215262971\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7107438016528925,\n \"acc_stderr\": 0.041391127276354626,\n \"acc_norm\": 0.7107438016528925,\n \"acc_norm_stderr\": 0.041391127276354626\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6944444444444444,\n \"acc_stderr\": 0.044531975073749834,\n \"acc_norm\": 0.6944444444444444,\n \"acc_norm_stderr\": 0.044531975073749834\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6687116564417178,\n \"acc_stderr\": 0.03697983910025588,\n \"acc_norm\": 0.6687116564417178,\n \"acc_norm_stderr\": 0.03697983910025588\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.39285714285714285,\n \"acc_stderr\": 0.04635550135609976,\n \"acc_norm\": 0.39285714285714285,\n \"acc_norm_stderr\": 0.04635550135609976\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7184466019417476,\n \"acc_stderr\": 0.04453254836326467,\n \"acc_norm\": 0.7184466019417476,\n \"acc_norm_stderr\": 0.04453254836326467\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8205128205128205,\n \"acc_stderr\": 0.025140935950335442,\n \"acc_norm\": 0.8205128205128205,\n \"acc_norm_stderr\": 0.025140935950335442\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.57,\n 
\"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7637292464878672,\n \"acc_stderr\": 0.01519047371703751,\n \"acc_norm\": 0.7637292464878672,\n \"acc_norm_stderr\": 0.01519047371703751\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6242774566473989,\n \"acc_stderr\": 0.02607431485165708,\n \"acc_norm\": 0.6242774566473989,\n \"acc_norm_stderr\": 0.02607431485165708\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4011173184357542,\n \"acc_stderr\": 0.01639222189940707,\n \"acc_norm\": 0.4011173184357542,\n \"acc_norm_stderr\": 0.01639222189940707\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.5915032679738562,\n \"acc_stderr\": 0.028146405993096358,\n \"acc_norm\": 0.5915032679738562,\n \"acc_norm_stderr\": 0.028146405993096358\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6527331189710611,\n \"acc_stderr\": 0.027040745502307336,\n \"acc_norm\": 0.6527331189710611,\n \"acc_norm_stderr\": 0.027040745502307336\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6604938271604939,\n \"acc_stderr\": 0.026348564412011624,\n \"acc_norm\": 0.6604938271604939,\n \"acc_norm_stderr\": 0.026348564412011624\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.43617021276595747,\n \"acc_stderr\": 0.02958345203628407,\n \"acc_norm\": 0.43617021276595747,\n \"acc_norm_stderr\": 0.02958345203628407\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4380704041720991,\n \"acc_stderr\": 0.01267190278256765,\n \"acc_norm\": 0.4380704041720991,\n \"acc_norm_stderr\": 0.01267190278256765\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5330882352941176,\n \"acc_stderr\": 0.03030625772246831,\n \"acc_norm\": 0.5330882352941176,\n \"acc_norm_stderr\": 0.03030625772246831\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.5751633986928104,\n \"acc_stderr\": 0.01999797303545833,\n \"acc_norm\": 0.5751633986928104,\n \"acc_norm_stderr\": 0.01999797303545833\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6363636363636364,\n \"acc_stderr\": 0.04607582090719976,\n \"acc_norm\": 0.6363636363636364,\n \"acc_norm_stderr\": 0.04607582090719976\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.03136250240935893,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.03136250240935893\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7164179104477612,\n \"acc_stderr\": 0.031871875379197966,\n \"acc_norm\": 0.7164179104477612,\n \"acc_norm_stderr\": 0.031871875379197966\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.79,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.45180722891566266,\n \"acc_stderr\": 0.03874371556587953,\n \"acc_norm\": 0.45180722891566266,\n \"acc_norm_stderr\": 0.03874371556587953\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7660818713450293,\n \"acc_stderr\": 0.03246721765117826,\n \"acc_norm\": 0.7660818713450293,\n \"acc_norm_stderr\": 0.03246721765117826\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.31456548347613217,\n \"mc1_stderr\": 0.01625524199317918,\n \"mc2\": 0.46118545589659976,\n \"mc2_stderr\": 0.015483508114692393\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7561168113654302,\n 
\"acc_stderr\": 0.012068923278908194\n },\n \"harness|drop|3\": {\n \"em\": 0.007340604026845637,\n \"em_stderr\": 0.0008741896875345934,\n \"f1\": 0.07567323825503336,\n \"f1_stderr\": 0.0016747744191590948\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.10993176648976498,\n \"acc_stderr\": 0.008616195587865397\n }\n}\n```", "repo_url": "https://huggingface.co/Expert68/llama2_13b_instructed_version2", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|arc:challenge|25_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|drop|3_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|gsm8k|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hellaswag|10_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T19-44-39.658427.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-management|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-virology|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T19-44-39.658427.parquet", 
"**/details_harness|hendrycksTest-anatomy|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-management|5_2023-11-12T19-44-39.658427.parquet", 
"**/details_harness|hendrycksTest-marketing|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-virology|5_2023-11-12T19-44-39.658427.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_biology|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T19-44-39.658427.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-management|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": 
["**/details_harness|hendrycksTest-prehistory|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-virology|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-world_religions|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|truthfulqa:mc|0_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["**/details_harness|winogrande|5_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-11-12T19-44-39.658427.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_11_12T19_44_39.658427", "path": ["results_2023-11-12T19-44-39.658427.parquet"]}, {"split": "latest", "path": ["results_2023-11-12T19-44-39.658427.parquet"]}]}]} | 2023-11-12T19:48:46+00:00 | []
| []
| TAGS
#region-us
|
# Dataset Card for Evaluation run of Expert68/llama2_13b_instructed_version2
## Dataset Description
- Homepage:
- Repository: URL
- Paper:
- Leaderboard: URL
- Point of Contact: clementine@URL
### Dataset Summary
Dataset automatically created during the evaluation run of model Expert68/llama2_13b_instructed_version2 on the Open LLM Leaderboard.
The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).
To load the details from a run, you can for instance do the following:
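For instance, a minimal sketch (the repository id and the `harness_winogrande_5` configuration name are taken from this card's metadata; any of the 64 configurations can be substituted):

```python
from datasets import load_dataset

# Per-example details for one evaluated task; the "train" split always
# points at the latest run.
data = load_dataset(
    "open-llm-leaderboard/details_Expert68__llama2_13b_instructed_version2_public",
    "harness_winogrande_5",
    split="train",
)
```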
## Latest results
These are the latest results from run 2023-11-12T19:44:39.658427 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):
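A minimal sketch for fetching that results file directly (the repository id and filename come from this card's metadata; the top-level key layout is assumed to match the metrics snippet stored there):

```python
import json

from huggingface_hub import hf_hub_download

# Download the aggregated results JSON for this run; the filename encodes
# the run timestamp.
path = hf_hub_download(
    repo_id="open-llm-leaderboard/details_Expert68__llama2_13b_instructed_version2_public",
    filename="results_2023-11-12T19-44-39.658427.json",
    repo_type="dataset",
)
with open(path) as f:
    results = json.load(f)

# "all" aggregates across tasks, per the metrics snippet in the metadata.
print(results["all"]["acc"])
```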
### Supported Tasks and Leaderboards
### Languages
## Dataset Structure
### Data Instances
### Data Fields
### Data Splits
## Dataset Creation
### Curation Rationale
### Source Data
#### Initial Data Collection and Normalization
#### Who are the source language producers?
### Annotations
#### Annotation process
#### Who are the annotators?
### Personal and Sensitive Information
## Considerations for Using the Data
### Social Impact of Dataset
### Discussion of Biases
### Other Known Limitations
## Additional Information
### Dataset Curators
### Licensing Information
### Contributions
| [
"# Dataset Card for Evaluation run of Expert68/llama2_13b_instructed_version2",
"## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL",
"### Dataset Summary\n\nDataset automatically created during the evaluation run of model Expert68/llama2_13b_instructed_version2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:",
"## Latest results\n\nThese are the latest results from run 2023-11-12T19:44:39.658427(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):",
"### Supported Tasks and Leaderboards",
"### Languages",
"## Dataset Structure",
"### Data Instances",
"### Data Fields",
"### Data Splits",
"## Dataset Creation",
"### Curation Rationale",
"### Source Data",
"#### Initial Data Collection and Normalization",
"#### Who are the source language producers?",
"### Annotations",
"#### Annotation process",
"#### Who are the annotators?",
"### Personal and Sensitive Information",
"## Considerations for Using the Data",
"### Social Impact of Dataset",
"### Discussion of Biases",
"### Other Known Limitations",
"## Additional Information",
"### Dataset Curators",
"### Licensing Information",
"### Contributions"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for Evaluation run of Expert68/llama2_13b_instructed_version2",
"## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL",
"### Dataset Summary\n\nDataset automatically created during the evaluation run of model Expert68/llama2_13b_instructed_version2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:",
"## Latest results\n\nThese are the latest results from run 2023-11-12T19:44:39.658427(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):",
"### Supported Tasks and Leaderboards",
"### Languages",
"## Dataset Structure",
"### Data Instances",
"### Data Fields",
"### Data Splits",
"## Dataset Creation",
"### Curation Rationale",
"### Source Data",
"#### Initial Data Collection and Normalization",
"#### Who are the source language producers?",
"### Annotations",
"#### Annotation process",
"#### Who are the annotators?",
"### Personal and Sensitive Information",
"## Considerations for Using the Data",
"### Social Impact of Dataset",
"### Discussion of Biases",
"### Other Known Limitations",
"## Additional Information",
"### Dataset Curators",
"### Licensing Information",
"### Contributions"
]
| [
6,
25,
31,
174,
67,
10,
4,
6,
6,
5,
5,
5,
7,
4,
10,
10,
5,
5,
9,
8,
8,
7,
8,
7,
5,
6,
6,
5
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Expert68/llama2_13b_instructed_version2## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model Expert68/llama2_13b_instructed_version2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-11-12T19:44:39.658427(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions"
]
|
51b6c5ba2de31859728f00e8558845e3c3f03c2a |
# Dataset Card for Evaluation run of NeverSleep/Mistral-11B-SynthIAirOmniMix
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/NeverSleep/Mistral-11B-SynthIAirOmniMix
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]
### Dataset Summary
Dataset automatically created during the evaluation run of model [NeverSleep/Mistral-11B-SynthIAirOmniMix](https://huggingface.co/NeverSleep/Mistral-11B-SynthIAirOmniMix) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_NeverSleep__Mistral-11B-SynthIAirOmniMix_public",
"harness_winogrande_5",
split="train")
```
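The aggregated run metrics live in the additional `results` configuration described above; a sketch, assuming the same `latest` split naming used by the other evaluation cards in this dump:

```python
from datasets import load_dataset

# "results" stores the aggregated metrics; "latest" points at the newest run.
results = load_dataset(
    "open-llm-leaderboard/details_NeverSleep__Mistral-11B-SynthIAirOmniMix_public",
    "results",
    split="latest",
)
```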
## Latest results
These are the [latest results from run 2023-11-12T19:54:58.939194](https://huggingface.co/datasets/open-llm-leaderboard/details_NeverSleep__Mistral-11B-SynthIAirOmniMix_public/blob/main/results_2023-11-12T19-54-58.939194.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.6277127436205546,
"acc_stderr": 0.03243061765974366,
"acc_norm": 0.6378229900253635,
"acc_norm_stderr": 0.03315507636067878,
"mc1": 0.3880048959608323,
"mc1_stderr": 0.017058761501347972,
"mc2": 0.5568818997417452,
"mc2_stderr": 0.015517245006607807,
"em": 0.23259228187919462,
"em_stderr": 0.004326636227794088,
"f1": 0.28881291946308657,
"f1_stderr": 0.004306419385994737
},
"harness|arc:challenge|25": {
"acc": 0.5921501706484642,
"acc_stderr": 0.014361097288449705,
"acc_norm": 0.6245733788395904,
"acc_norm_stderr": 0.014150631435111728
},
"harness|hellaswag|10": {
"acc": 0.6396136227843059,
"acc_stderr": 0.004791313101877047,
"acc_norm": 0.8313085042820155,
"acc_norm_stderr": 0.003737138752336941
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.35,
"acc_stderr": 0.04793724854411022,
"acc_norm": 0.35,
"acc_norm_stderr": 0.04793724854411022
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6074074074074074,
"acc_stderr": 0.0421850621536888,
"acc_norm": 0.6074074074074074,
"acc_norm_stderr": 0.0421850621536888
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.625,
"acc_stderr": 0.039397364351956274,
"acc_norm": 0.625,
"acc_norm_stderr": 0.039397364351956274
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.58,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.58,
"acc_norm_stderr": 0.049604496374885836
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.6754716981132075,
"acc_stderr": 0.028815615713432115,
"acc_norm": 0.6754716981132075,
"acc_norm_stderr": 0.028815615713432115
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7222222222222222,
"acc_stderr": 0.03745554791462456,
"acc_norm": 0.7222222222222222,
"acc_norm_stderr": 0.03745554791462456
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.48,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.48,
"acc_norm_stderr": 0.050211673156867795
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.53,
"acc_stderr": 0.05016135580465919,
"acc_norm": 0.53,
"acc_norm_stderr": 0.05016135580465919
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.41,
"acc_stderr": 0.04943110704237101,
"acc_norm": 0.41,
"acc_norm_stderr": 0.04943110704237101
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.630057803468208,
"acc_stderr": 0.0368122963339432,
"acc_norm": 0.630057803468208,
"acc_norm_stderr": 0.0368122963339432
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.3627450980392157,
"acc_stderr": 0.04784060704105653,
"acc_norm": 0.3627450980392157,
"acc_norm_stderr": 0.04784060704105653
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.76,
"acc_stderr": 0.042923469599092816,
"acc_norm": 0.76,
"acc_norm_stderr": 0.042923469599092816
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5574468085106383,
"acc_stderr": 0.03246956919789958,
"acc_norm": 0.5574468085106383,
"acc_norm_stderr": 0.03246956919789958
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.45614035087719296,
"acc_stderr": 0.04685473041907789,
"acc_norm": 0.45614035087719296,
"acc_norm_stderr": 0.04685473041907789
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5655172413793104,
"acc_stderr": 0.04130740879555497,
"acc_norm": 0.5655172413793104,
"acc_norm_stderr": 0.04130740879555497
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.3888888888888889,
"acc_stderr": 0.025107425481137282,
"acc_norm": 0.3888888888888889,
"acc_norm_stderr": 0.025107425481137282
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.3888888888888889,
"acc_stderr": 0.04360314860077459,
"acc_norm": 0.3888888888888889,
"acc_norm_stderr": 0.04360314860077459
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.4,
"acc_stderr": 0.04923659639173309,
"acc_norm": 0.4,
"acc_norm_stderr": 0.04923659639173309
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7677419354838709,
"acc_stderr": 0.024022256130308235,
"acc_norm": 0.7677419354838709,
"acc_norm_stderr": 0.024022256130308235
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.4975369458128079,
"acc_stderr": 0.03517945038691063,
"acc_norm": 0.4975369458128079,
"acc_norm_stderr": 0.03517945038691063
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.69,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.69,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7575757575757576,
"acc_stderr": 0.03346409881055953,
"acc_norm": 0.7575757575757576,
"acc_norm_stderr": 0.03346409881055953
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.8080808080808081,
"acc_stderr": 0.02805779167298901,
"acc_norm": 0.8080808080808081,
"acc_norm_stderr": 0.02805779167298901
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8756476683937824,
"acc_stderr": 0.02381447708659355,
"acc_norm": 0.8756476683937824,
"acc_norm_stderr": 0.02381447708659355
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6743589743589744,
"acc_stderr": 0.02375966576741229,
"acc_norm": 0.6743589743589744,
"acc_norm_stderr": 0.02375966576741229
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.028742040903948492,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.028742040903948492
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6848739495798319,
"acc_stderr": 0.03017680828897434,
"acc_norm": 0.6848739495798319,
"acc_norm_stderr": 0.03017680828897434
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.304635761589404,
"acc_stderr": 0.03757949922943343,
"acc_norm": 0.304635761589404,
"acc_norm_stderr": 0.03757949922943343
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8220183486238533,
"acc_stderr": 0.01639943636661292,
"acc_norm": 0.8220183486238533,
"acc_norm_stderr": 0.01639943636661292
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5092592592592593,
"acc_stderr": 0.034093869469927006,
"acc_norm": 0.5092592592592593,
"acc_norm_stderr": 0.034093869469927006
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.8235294117647058,
"acc_stderr": 0.026756401538078966,
"acc_norm": 0.8235294117647058,
"acc_norm_stderr": 0.026756401538078966
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.7890295358649789,
"acc_stderr": 0.02655837250266192,
"acc_norm": 0.7890295358649789,
"acc_norm_stderr": 0.02655837250266192
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.7085201793721974,
"acc_stderr": 0.03050028317654585,
"acc_norm": 0.7085201793721974,
"acc_norm_stderr": 0.03050028317654585
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.7786259541984732,
"acc_stderr": 0.0364129708131373,
"acc_norm": 0.7786259541984732,
"acc_norm_stderr": 0.0364129708131373
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7768595041322314,
"acc_stderr": 0.03800754475228732,
"acc_norm": 0.7768595041322314,
"acc_norm_stderr": 0.03800754475228732
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7592592592592593,
"acc_stderr": 0.04133119440243838,
"acc_norm": 0.7592592592592593,
"acc_norm_stderr": 0.04133119440243838
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7607361963190185,
"acc_stderr": 0.0335195387952127,
"acc_norm": 0.7607361963190185,
"acc_norm_stderr": 0.0335195387952127
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.4642857142857143,
"acc_stderr": 0.04733667890053756,
"acc_norm": 0.4642857142857143,
"acc_norm_stderr": 0.04733667890053756
},
"harness|hendrycksTest-management|5": {
"acc": 0.7961165048543689,
"acc_stderr": 0.03989139859531771,
"acc_norm": 0.7961165048543689,
"acc_norm_stderr": 0.03989139859531771
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8632478632478633,
"acc_stderr": 0.022509033937077816,
"acc_norm": 0.8632478632478633,
"acc_norm_stderr": 0.022509033937077816
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.7,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.7,
"acc_norm_stderr": 0.046056618647183814
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8160919540229885,
"acc_stderr": 0.01385372417092253,
"acc_norm": 0.8160919540229885,
"acc_norm_stderr": 0.01385372417092253
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.6820809248554913,
"acc_stderr": 0.025070713719153186,
"acc_norm": 0.6820809248554913,
"acc_norm_stderr": 0.025070713719153186
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.37318435754189944,
"acc_stderr": 0.016175692013381968,
"acc_norm": 0.37318435754189944,
"acc_norm_stderr": 0.016175692013381968
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.7222222222222222,
"acc_stderr": 0.0256468630971379,
"acc_norm": 0.7222222222222222,
"acc_norm_stderr": 0.0256468630971379
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.684887459807074,
"acc_stderr": 0.026385273703464482,
"acc_norm": 0.684887459807074,
"acc_norm_stderr": 0.026385273703464482
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7253086419753086,
"acc_stderr": 0.024836057868294677,
"acc_norm": 0.7253086419753086,
"acc_norm_stderr": 0.024836057868294677
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.48226950354609927,
"acc_stderr": 0.02980873964223777,
"acc_norm": 0.48226950354609927,
"acc_norm_stderr": 0.02980873964223777
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.45436766623207303,
"acc_stderr": 0.012716941720734804,
"acc_norm": 0.45436766623207303,
"acc_norm_stderr": 0.012716941720734804
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6838235294117647,
"acc_stderr": 0.028245687391462927,
"acc_norm": 0.6838235294117647,
"acc_norm_stderr": 0.028245687391462927
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6503267973856209,
"acc_stderr": 0.01929196189506638,
"acc_norm": 0.6503267973856209,
"acc_norm_stderr": 0.01929196189506638
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6727272727272727,
"acc_stderr": 0.04494290866252091,
"acc_norm": 0.6727272727272727,
"acc_norm_stderr": 0.04494290866252091
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7387755102040816,
"acc_stderr": 0.02812342933514278,
"acc_norm": 0.7387755102040816,
"acc_norm_stderr": 0.02812342933514278
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.845771144278607,
"acc_stderr": 0.025538433368578334,
"acc_norm": 0.845771144278607,
"acc_norm_stderr": 0.025538433368578334
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.83,
"acc_stderr": 0.0377525168068637,
"acc_norm": 0.83,
"acc_norm_stderr": 0.0377525168068637
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5542168674698795,
"acc_stderr": 0.038695433234721015,
"acc_norm": 0.5542168674698795,
"acc_norm_stderr": 0.038695433234721015
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8245614035087719,
"acc_stderr": 0.029170885500727668,
"acc_norm": 0.8245614035087719,
"acc_norm_stderr": 0.029170885500727668
},
"harness|truthfulqa:mc|0": {
"mc1": 0.3880048959608323,
"mc1_stderr": 0.017058761501347972,
"mc2": 0.5568818997417452,
"mc2_stderr": 0.015517245006607807
},
"harness|winogrande|5": {
"acc": 0.7640094711917916,
"acc_stderr": 0.011933828850275626
},
"harness|drop|3": {
"em": 0.23259228187919462,
"em_stderr": 0.004326636227794088,
"f1": 0.28881291946308657,
"f1_stderr": 0.004306419385994737
},
"harness|gsm8k|5": {
"acc": 0.11902956785443518,
"acc_stderr": 0.00891970291116164
}
}
```
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] | open-llm-leaderboard/details_NeverSleep__Mistral-11B-SynthIAirOmniMix | [
"region:us"
]
| 2023-11-12T19:57:59+00:00 | {"pretty_name": "Evaluation run of NeverSleep/Mistral-11B-SynthIAirOmniMix", "dataset_summary": "Dataset automatically created during the evaluation run of model [NeverSleep/Mistral-11B-SynthIAirOmniMix](https://huggingface.co/NeverSleep/Mistral-11B-SynthIAirOmniMix) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_NeverSleep__Mistral-11B-SynthIAirOmniMix_public\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-11-12T19:54:58.939194](https://huggingface.co/datasets/open-llm-leaderboard/details_NeverSleep__Mistral-11B-SynthIAirOmniMix_public/blob/main/results_2023-11-12T19-54-58.939194.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6277127436205546,\n \"acc_stderr\": 0.03243061765974366,\n \"acc_norm\": 0.6378229900253635,\n \"acc_norm_stderr\": 0.03315507636067878,\n \"mc1\": 0.3880048959608323,\n \"mc1_stderr\": 0.017058761501347972,\n \"mc2\": 0.5568818997417452,\n \"mc2_stderr\": 0.015517245006607807,\n \"em\": 0.23259228187919462,\n \"em_stderr\": 0.004326636227794088,\n \"f1\": 0.28881291946308657,\n \"f1_stderr\": 0.004306419385994737\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5921501706484642,\n \"acc_stderr\": 0.014361097288449705,\n \"acc_norm\": 0.6245733788395904,\n \"acc_norm_stderr\": 0.014150631435111728\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6396136227843059,\n \"acc_stderr\": 0.004791313101877047,\n \"acc_norm\": 0.8313085042820155,\n \"acc_norm_stderr\": 0.003737138752336941\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411022,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411022\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6074074074074074,\n \"acc_stderr\": 0.0421850621536888,\n \"acc_norm\": 0.6074074074074074,\n \"acc_norm_stderr\": 0.0421850621536888\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.625,\n \"acc_stderr\": 0.039397364351956274,\n \"acc_norm\": 0.625,\n \"acc_norm_stderr\": 0.039397364351956274\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.58,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6754716981132075,\n \"acc_stderr\": 0.028815615713432115,\n \"acc_norm\": 0.6754716981132075,\n \"acc_norm_stderr\": 0.028815615713432115\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 
0.03745554791462456,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.03745554791462456\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.04943110704237101,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.04943110704237101\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.630057803468208,\n \"acc_stderr\": 0.0368122963339432,\n \"acc_norm\": 0.630057803468208,\n \"acc_norm_stderr\": 0.0368122963339432\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3627450980392157,\n \"acc_stderr\": 0.04784060704105653,\n \"acc_norm\": 0.3627450980392157,\n \"acc_norm_stderr\": 0.04784060704105653\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5574468085106383,\n \"acc_stderr\": 0.03246956919789958,\n \"acc_norm\": 0.5574468085106383,\n \"acc_norm_stderr\": 0.03246956919789958\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.45614035087719296,\n \"acc_stderr\": 0.04685473041907789,\n \"acc_norm\": 0.45614035087719296,\n \"acc_norm_stderr\": 0.04685473041907789\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5655172413793104,\n \"acc_stderr\": 0.04130740879555497,\n \"acc_norm\": 0.5655172413793104,\n \"acc_norm_stderr\": 0.04130740879555497\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3888888888888889,\n \"acc_stderr\": 0.025107425481137282,\n \"acc_norm\": 0.3888888888888889,\n \"acc_norm_stderr\": 0.025107425481137282\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3888888888888889,\n \"acc_stderr\": 0.04360314860077459,\n \"acc_norm\": 0.3888888888888889,\n \"acc_norm_stderr\": 0.04360314860077459\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7677419354838709,\n \"acc_stderr\": 0.024022256130308235,\n \"acc_norm\": 0.7677419354838709,\n \"acc_norm_stderr\": 0.024022256130308235\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4975369458128079,\n \"acc_stderr\": 0.03517945038691063,\n \"acc_norm\": 0.4975369458128079,\n \"acc_norm_stderr\": 0.03517945038691063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7575757575757576,\n \"acc_stderr\": 0.03346409881055953,\n \"acc_norm\": 0.7575757575757576,\n \"acc_norm_stderr\": 0.03346409881055953\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8080808080808081,\n \"acc_stderr\": 0.02805779167298901,\n \"acc_norm\": 0.8080808080808081,\n \"acc_norm_stderr\": 0.02805779167298901\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 
0.8756476683937824,\n \"acc_stderr\": 0.02381447708659355,\n \"acc_norm\": 0.8756476683937824,\n \"acc_norm_stderr\": 0.02381447708659355\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6743589743589744,\n \"acc_stderr\": 0.02375966576741229,\n \"acc_norm\": 0.6743589743589744,\n \"acc_norm_stderr\": 0.02375966576741229\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.028742040903948492,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.028742040903948492\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6848739495798319,\n \"acc_stderr\": 0.03017680828897434,\n \"acc_norm\": 0.6848739495798319,\n \"acc_norm_stderr\": 0.03017680828897434\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.304635761589404,\n \"acc_stderr\": 0.03757949922943343,\n \"acc_norm\": 0.304635761589404,\n \"acc_norm_stderr\": 0.03757949922943343\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8220183486238533,\n \"acc_stderr\": 0.01639943636661292,\n \"acc_norm\": 0.8220183486238533,\n \"acc_norm_stderr\": 0.01639943636661292\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5092592592592593,\n \"acc_stderr\": 0.034093869469927006,\n \"acc_norm\": 0.5092592592592593,\n \"acc_norm_stderr\": 0.034093869469927006\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8235294117647058,\n \"acc_stderr\": 0.026756401538078966,\n \"acc_norm\": 0.8235294117647058,\n \"acc_norm_stderr\": 0.026756401538078966\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7890295358649789,\n \"acc_stderr\": 0.02655837250266192,\n \"acc_norm\": 0.7890295358649789,\n \"acc_norm_stderr\": 0.02655837250266192\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7085201793721974,\n \"acc_stderr\": 0.03050028317654585,\n \"acc_norm\": 0.7085201793721974,\n \"acc_norm_stderr\": 0.03050028317654585\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7786259541984732,\n \"acc_stderr\": 0.0364129708131373,\n \"acc_norm\": 0.7786259541984732,\n \"acc_norm_stderr\": 0.0364129708131373\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7768595041322314,\n \"acc_stderr\": 0.03800754475228732,\n \"acc_norm\": 0.7768595041322314,\n \"acc_norm_stderr\": 0.03800754475228732\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7592592592592593,\n \"acc_stderr\": 0.04133119440243838,\n \"acc_norm\": 0.7592592592592593,\n \"acc_norm_stderr\": 0.04133119440243838\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7607361963190185,\n \"acc_stderr\": 0.0335195387952127,\n \"acc_norm\": 0.7607361963190185,\n \"acc_norm_stderr\": 0.0335195387952127\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4642857142857143,\n \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.4642857142857143,\n \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7961165048543689,\n \"acc_stderr\": 0.03989139859531771,\n \"acc_norm\": 0.7961165048543689,\n \"acc_norm_stderr\": 0.03989139859531771\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8632478632478633,\n \"acc_stderr\": 0.022509033937077816,\n \"acc_norm\": 0.8632478632478633,\n \"acc_norm_stderr\": 0.022509033937077816\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n 
\"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8160919540229885,\n \"acc_stderr\": 0.01385372417092253,\n \"acc_norm\": 0.8160919540229885,\n \"acc_norm_stderr\": 0.01385372417092253\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6820809248554913,\n \"acc_stderr\": 0.025070713719153186,\n \"acc_norm\": 0.6820809248554913,\n \"acc_norm_stderr\": 0.025070713719153186\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.37318435754189944,\n \"acc_stderr\": 0.016175692013381968,\n \"acc_norm\": 0.37318435754189944,\n \"acc_norm_stderr\": 0.016175692013381968\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.0256468630971379,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.0256468630971379\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.684887459807074,\n \"acc_stderr\": 0.026385273703464482,\n \"acc_norm\": 0.684887459807074,\n \"acc_norm_stderr\": 0.026385273703464482\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7253086419753086,\n \"acc_stderr\": 0.024836057868294677,\n \"acc_norm\": 0.7253086419753086,\n \"acc_norm_stderr\": 0.024836057868294677\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.48226950354609927,\n \"acc_stderr\": 0.02980873964223777,\n \"acc_norm\": 0.48226950354609927,\n \"acc_norm_stderr\": 0.02980873964223777\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.45436766623207303,\n \"acc_stderr\": 0.012716941720734804,\n \"acc_norm\": 0.45436766623207303,\n \"acc_norm_stderr\": 0.012716941720734804\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6838235294117647,\n \"acc_stderr\": 0.028245687391462927,\n \"acc_norm\": 0.6838235294117647,\n \"acc_norm_stderr\": 0.028245687391462927\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6503267973856209,\n \"acc_stderr\": 0.01929196189506638,\n \"acc_norm\": 0.6503267973856209,\n \"acc_norm_stderr\": 0.01929196189506638\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.04494290866252091,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.04494290866252091\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7387755102040816,\n \"acc_stderr\": 0.02812342933514278,\n \"acc_norm\": 0.7387755102040816,\n \"acc_norm_stderr\": 0.02812342933514278\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.845771144278607,\n \"acc_stderr\": 0.025538433368578334,\n \"acc_norm\": 0.845771144278607,\n \"acc_norm_stderr\": 0.025538433368578334\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.83,\n \"acc_stderr\": 0.0377525168068637,\n \"acc_norm\": 0.83,\n \"acc_norm_stderr\": 0.0377525168068637\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5542168674698795,\n \"acc_stderr\": 0.038695433234721015,\n \"acc_norm\": 0.5542168674698795,\n \"acc_norm_stderr\": 0.038695433234721015\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8245614035087719,\n \"acc_stderr\": 0.029170885500727668,\n \"acc_norm\": 0.8245614035087719,\n \"acc_norm_stderr\": 0.029170885500727668\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3880048959608323,\n \"mc1_stderr\": 0.017058761501347972,\n \"mc2\": 0.5568818997417452,\n \"mc2_stderr\": 0.015517245006607807\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7640094711917916,\n 
\"acc_stderr\": 0.011933828850275626\n },\n \"harness|drop|3\": {\n \"em\": 0.23259228187919462,\n \"em_stderr\": 0.004326636227794088,\n \"f1\": 0.28881291946308657,\n \"f1_stderr\": 0.004306419385994737\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.11902956785443518,\n \"acc_stderr\": 0.00891970291116164\n }\n}\n```", "repo_url": "https://huggingface.co/NeverSleep/Mistral-11B-SynthIAirOmniMix", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|arc:challenge|25_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|drop|3_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|gsm8k|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hellaswag|10_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T19-54-58.939194.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-management|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-virology|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T19-54-58.939194.parquet", 
"**/details_harness|hendrycksTest-anatomy|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-management|5_2023-11-12T19-54-58.939194.parquet", 
"**/details_harness|hendrycksTest-marketing|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-virology|5_2023-11-12T19-54-58.939194.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_biology|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T19-54-58.939194.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-management|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": 
["**/details_harness|hendrycksTest-prehistory|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-virology|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-world_religions|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|truthfulqa:mc|0_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["**/details_harness|winogrande|5_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-11-12T19-54-58.939194.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_11_12T19_54_58.939194", "path": ["results_2023-11-12T19-54-58.939194.parquet"]}, {"split": "latest", "path": ["results_2023-11-12T19-54-58.939194.parquet"]}]}]} | 2023-11-12T19:58:59+00:00 | []
| []
| TAGS
#region-us
|
# Dataset Card for Evaluation run of NeverSleep/Mistral-11B-SynthIAirOmniMix
## Dataset Description
- Homepage:
- Repository: URL
- Paper:
- Leaderboard: URL
- Point of Contact: clementine@URL
### Dataset Summary
Dataset automatically created during the evaluation run of model NeverSleep/Mistral-11B-SynthIAirOmniMix on the Open LLM Leaderboard.
The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the most recent results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).
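For example, a minimal sketch of reading those aggregated metrics, assuming only what this card's config list states (a "results" configuration whose splits are the run timestamp and "latest"):

```python
from datasets import load_dataset

# The "results" configuration holds the aggregated metrics of the run;
# "latest" points at the most recent evaluation, while the timestamped
# split (2023_11_12T19_54_58.939194 for this card) pins this specific run.
results = load_dataset(
    "open-llm-leaderboard/details_NeverSleep__Mistral-11B-SynthIAirOmniMix_public",
    "results",
    split="latest",
)
```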
To load the details from a run, you can for instance do the following:
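```python
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_NeverSleep__Mistral-11B-SynthIAirOmniMix_public",
    "harness_winogrande_5",  # any of the 64 task configurations works here
    split="latest",          # or the timestamped split "2023_11_12T19_54_58.939194"
)
```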
## Latest results
These are the latest results from run 2023-11-12T19:54:58.939194 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):
### Supported Tasks and Leaderboards
### Languages
## Dataset Structure
### Data Instances
### Data Fields
### Data Splits
## Dataset Creation
### Curation Rationale
### Source Data
#### Initial Data Collection and Normalization
#### Who are the source language producers?
### Annotations
#### Annotation process
#### Who are the annotators?
### Personal and Sensitive Information
## Considerations for Using the Data
### Social Impact of Dataset
### Discussion of Biases
### Other Known Limitations
## Additional Information
### Dataset Curators
### Licensing Information
### Contributions
| [
"# Dataset Card for Evaluation run of NeverSleep/Mistral-11B-SynthIAirOmniMix",
"## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL",
"### Dataset Summary\n\nDataset automatically created during the evaluation run of model NeverSleep/Mistral-11B-SynthIAirOmniMix on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:",
"## Latest results\n\nThese are the latest results from run 2023-11-12T19:54:58.939194(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):",
"### Supported Tasks and Leaderboards",
"### Languages",
"## Dataset Structure",
"### Data Instances",
"### Data Fields",
"### Data Splits",
"## Dataset Creation",
"### Curation Rationale",
"### Source Data",
"#### Initial Data Collection and Normalization",
"#### Who are the source language producers?",
"### Annotations",
"#### Annotation process",
"#### Who are the annotators?",
"### Personal and Sensitive Information",
"## Considerations for Using the Data",
"### Social Impact of Dataset",
"### Discussion of Biases",
"### Other Known Limitations",
"## Additional Information",
"### Dataset Curators",
"### Licensing Information",
"### Contributions"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for Evaluation run of NeverSleep/Mistral-11B-SynthIAirOmniMix",
"## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL",
"### Dataset Summary\n\nDataset automatically created during the evaluation run of model NeverSleep/Mistral-11B-SynthIAirOmniMix on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:",
"## Latest results\n\nThese are the latest results from run 2023-11-12T19:54:58.939194(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):",
"### Supported Tasks and Leaderboards",
"### Languages",
"## Dataset Structure",
"### Data Instances",
"### Data Fields",
"### Data Splits",
"## Dataset Creation",
"### Curation Rationale",
"### Source Data",
"#### Initial Data Collection and Normalization",
"#### Who are the source language producers?",
"### Annotations",
"#### Annotation process",
"#### Who are the annotators?",
"### Personal and Sensitive Information",
"## Considerations for Using the Data",
"### Social Impact of Dataset",
"### Discussion of Biases",
"### Other Known Limitations",
"## Additional Information",
"### Dataset Curators",
"### Licensing Information",
"### Contributions"
]
| [
6,
28,
31,
177,
67,
10,
4,
6,
6,
5,
5,
5,
7,
4,
10,
10,
5,
5,
9,
8,
8,
7,
8,
7,
5,
6,
6,
5
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of NeverSleep/Mistral-11B-SynthIAirOmniMix## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model NeverSleep/Mistral-11B-SynthIAirOmniMix on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-11-12T19:54:58.939194(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions"
]
|
37b172ead9370ed4564573b19e1af4629369f960 | # Dataset Card for "BigEarthNet-S2-v1.0"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | danielz01/BigEarthNet-S2-v1.0 | [
"region:us"
]
| 2023-11-12T20:17:15+00:00 | {"configs": [{"config_name": "s2-rgb", "data_files": [{"split": "test", "path": "s2-rgb/test-*"}, {"split": "val", "path": "s2-rgb/val-*"}, {"split": "train", "path": "s2-rgb/train-*"}]}], "dataset_info": {"config_name": "s2-rgb", "features": [{"name": "img", "dtype": "image"}, {"name": "labels", "sequence": "string"}, {"name": "coordinates", "struct": [{"name": "lrx", "dtype": "int64"}, {"name": "lry", "dtype": "int64"}, {"name": "ulx", "dtype": "int64"}, {"name": "uly", "dtype": "int64"}]}, {"name": "projection", "dtype": "string"}, {"name": "tile_source", "dtype": "string"}, {"name": "acquisition_date", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 3453114936.75, "num_examples": 125866}, {"name": "val", "num_bytes": 3393628600.625, "num_examples": 123723}, {"name": "train", "num_bytes": 7391482704.125, "num_examples": 269695}], "download_size": 13839792533, "dataset_size": 14238226241.5}} | 2023-11-13T09:11:20+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "BigEarthNet-S2-v1.0"
More Information needed | [
"# Dataset Card for \"BigEarthNet-S2-v1.0\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"BigEarthNet-S2-v1.0\"\n\nMore Information needed"
]
| [
6,
19
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"BigEarthNet-S2-v1.0\"\n\nMore Information needed"
]
|
29859c6ead839d732b87af734a92c1b0f3e8287c | # Dataset Card for "ultimus_docs_dataset"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | SaffalPoosh/ultimus_docs_dataset | [
"region:us"
]
| 2023-11-12T20:30:49+00:00 | {"dataset_info": {"features": [{"name": "ultimus_docs", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 680470, "num_examples": 479}], "download_size": 245252, "dataset_size": 680470}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2023-11-12T20:30:55+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "ultimus_docs_dataset"
More Information needed | [
"# Dataset Card for \"ultimus_docs_dataset\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"ultimus_docs_dataset\"\n\nMore Information needed"
]
| [
6,
18
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"ultimus_docs_dataset\"\n\nMore Information needed"
]
|
9d689d95c754f4497e39deba2f06d81e395718ed |
# Zephyr-7B-Beta Customer Support Chatbot
This dataset card aims to be a base template for new datasets. It has been generated using [this raw template](https://github.com/huggingface/huggingface_hub/blob/main/src/huggingface_hub/templates/datasetcard_template.md?plain=1).
## Introduction
Welcome to the `zephyr-7b-beta-invoices` repository! This project leverages the Zephyr-7B-Beta model trained on the "Bitext-Customer-Support-LLM-Chatbot-Training-Dataset" to create a state-of-the-art customer support chatbot. Our goal is to provide an efficient and accurate chatbot for handling invoice-related queries.
## Dataset Overview
The dataset used for training is `bitext/Bitext-customer-support-llm-chatbot-training-dataset`. It's tailored for customer support scenarios, focusing on conversations related to invoice queries, clarifications, and resolutions.
### Dataset Description
- **Forked by:** [Bitext Customer Support](https://huggingface.co/datasets/bitext/Bitext-customer-support-llm-chatbot-training-dataset)
- **Edited by:** [Erfan Varedi](https://linkedin.com/in/erfanvaredi)
## Model Details
[zephyr-7b-beta](https://huggingface.co/HuggingFaceH4/zephyr-7b-beta) is a cutting-edge language model specifically tuned for customer support interactions. It understands context, manages conversation flow, and provides accurate responses to invoice-related inquiries.
## Installation and Setup
To use the chatbot, install the necessary packages:
```bash
pip install datasets
```
## Quick Start Guide
To quickly set up and run the chatbot, use the following script:
```python
# Import libraries
from datasets import load_dataset
# Load dataset
ds = load_dataset('erfanvaredi/zephyr-7b-beta-invoices')
df_dataset = ds["train"].to_pandas()
df_dataset.head()
# Example of text
print(df_dataset['text'].iloc[85])
# <|system|>
# You are a support chatbot who helps with user queries chatbot who always responds in the style of a professional.</s>
# <|user|>
# what do I have to do to cancel order {{Order Number}}?</s>
# <|assistant|>
# I perceive that you're looking for guidance on canceling order {{Order Number}}. To assist you with the cancellation process, please follow these steps:
# 1. Log In to Your Account: Begin by signing in to our {{Online Company Portal Info}} using your credentials.
# 2. Locate Your Order: Once you're in, navigate to the '{{Online Order Interaction}}' or '{{Online Order Interaction}}' section.
# 3. Find the Relevant Order: Look for the order number {{Order Number}} within this section.
# 4. Initiate the Cancellation: Click on the order to open the details and locate the '{{Online Order Interaction}}' option.
# 5. Complete the Cancellation Process: Follow the prompts and provide any required information to finalize the cancellation.
# If you encounter any difficulties during this process or have any further questions, please don't hesitate to reach out to our dedicated team. We are available during {{Customer Support Hours}} at {{Customer Support Phone Number}}, or you can connect with us through the Live Chat feature on our {{Website URL}}. We value your satisfaction and are here to assist you every step of the way.
```
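If you only need the invoice-related slice of the conversations, the same dataframe can be filtered with plain pandas. The sketch below is not part of the original quick start: only the `text` column is guaranteed by the snippet above, and the keyword list is purely illustrative.
```python
# Minimal follow-up sketch: filter the training split down to rows whose
# conversation text mentions invoicing. Assumes only the 'text' column
# shown in the quick start; the keywords are illustrative, not an official
# taxonomy of the dataset.
from datasets import load_dataset

ds = load_dataset('erfanvaredi/zephyr-7b-beta-invoices')
df_dataset = ds["train"].to_pandas()

keywords = "invoice|billing|receipt"
df_invoices = df_dataset[df_dataset["text"].str.lower().str.contains(keywords, na=False)]

print(f"{len(df_invoices)} of {len(df_dataset)} examples mention invoicing")
```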
## License
This project is licensed under the CDLA-Sharing-1.0 License.
## Contact
For questions or collaboration, please reach out to me at [Linkedin](https://linkedin.com/in/erfanvaredi). | erfanvaredi/zephyr-7b-beta-invoices | [
"task_categories:text-classification",
"task_categories:conversational",
"task_categories:text-generation",
"size_categories:10K<n<100K",
"language:en",
"license:cdla-sharing-1.0",
"customer",
"customer-support",
"region:us"
]
| 2023-11-12T20:35:02+00:00 | {"language": ["en"], "license": "cdla-sharing-1.0", "size_categories": ["10K<n<100K"], "task_categories": ["text-classification", "conversational", "text-generation"], "pretty_name": "Zephyr Invoince Dataset", "tags": ["customer", "customer-support"]} | 2023-11-12T21:03:19+00:00 | []
| [
"en"
]
| TAGS
#task_categories-text-classification #task_categories-conversational #task_categories-text-generation #size_categories-10K<n<100K #language-English #license-cdla-sharing-1.0 #customer #customer-support #region-us
|
# Zephyr-7B-Beta Customer Support Chatbot
This dataset card aims to be a base template for new datasets. It has been generated using this raw template.
## Introduction
Welcome to the 'zephyr-7b-beta-invoices' repository! This project leverages the Zephyr-7B-Beta model trained on the "Bitext-Customer-Support-LLM-Chatbot-Training-Dataset" to create a state-of-the-art customer support chatbot. Our goal is to provide an efficient and accurate chatbot for handling invoice-related queries.
## Dataset Overview
The dataset used for training is 'bitext/Bitext-customer-support-llm-chatbot-training-dataset'. It's tailored for customer support scenarios, focusing on conversations related to invoice queries, clarifications, and resolutions.
### Dataset Description
- Forked by: Bitext Customer Support
- Edited by: Erfan Varedi
## Model Details
zephyr-7b-beta is a cutting-edge language model specifically tuned for customer support interactions. It understands context, manages conversation flow, and provides accurate responses to invoice-related inquiries.
## Installation and Setup
To use the chatbot, install the necessary packages:
## Quick Start Guide
To quickly set up and run the chatbot, use the following script:
## License
This project is licensed under the CDLA-Sharing-1.0 License.
## Contact
For questions or collaboration, please reach out to me at Linkedin. | [
"# Zephyr-7B-Beta Customer Support Chatbot\n\n\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.",
"## Introduction\nWelcome to the 'zephyr-7b-beta-invoices' repository! This project leverages the Zephyr-7B-Beta model trained on the \"Bitext-Customer-Support-LLM-Chatbot-Training-Dataset\" to create a state-of-the-art customer support chatbot. Our goal is to provide an efficient and accurate chatbot for handling invoice-related queries.",
"## Dataset Overview\nThe dataset used for training is 'bitext/Bitext-customer-support-llm-chatbot-training-dataset'. It's tailored for customer support scenarios, focusing on conversations related to invoice queries, clarifications, and resolutions.",
"### Dataset Description\n\n\n\n\n- Forked by: Bitext Customer Support\n- Edited by: Erfan Varedi",
"## Model Details\nzephyr-7b-beta is a cutting-edge language model specifically tuned for customer support interactions. It understands context, manages conversation flow, and provides accurate responses to invoice-related inquiries.",
"## Installation and Setup\nTo use the chatbot, install the necessary packages:",
"## Quick Start Guide\nTo quickly set up and run the chatbot, use the following script:",
"## License\nThis project is licensed under the CDLA-Sharing-1.0 License.",
"## Contact\nFor questions or collaboration, please reach out to me at Linkedin."
]
| [
"TAGS\n#task_categories-text-classification #task_categories-conversational #task_categories-text-generation #size_categories-10K<n<100K #language-English #license-cdla-sharing-1.0 #customer #customer-support #region-us \n",
"# Zephyr-7B-Beta Customer Support Chatbot\n\n\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.",
"## Introduction\nWelcome to the 'zephyr-7b-beta-invoices' repository! This project leverages the Zephyr-7B-Beta model trained on the \"Bitext-Customer-Support-LLM-Chatbot-Training-Dataset\" to create a state-of-the-art customer support chatbot. Our goal is to provide an efficient and accurate chatbot for handling invoice-related queries.",
"## Dataset Overview\nThe dataset used for training is 'bitext/Bitext-customer-support-llm-chatbot-training-dataset'. It's tailored for customer support scenarios, focusing on conversations related to invoice queries, clarifications, and resolutions.",
"### Dataset Description\n\n\n\n\n- Forked by: Bitext Customer Support\n- Edited by: Erfan Varedi",
"## Model Details\nzephyr-7b-beta is a cutting-edge language model specifically tuned for customer support interactions. It understands context, manages conversation flow, and provides accurate responses to invoice-related inquiries.",
"## Installation and Setup\nTo use the chatbot, install the necessary packages:",
"## Quick Start Guide\nTo quickly set up and run the chatbot, use the following script:",
"## License\nThis project is licensed under the CDLA-Sharing-1.0 License.",
"## Contact\nFor questions or collaboration, please reach out to me at Linkedin."
]
| [
75,
39,
100,
69,
23,
53,
17,
19,
18,
17
]
| [
"passage: TAGS\n#task_categories-text-classification #task_categories-conversational #task_categories-text-generation #size_categories-10K<n<100K #language-English #license-cdla-sharing-1.0 #customer #customer-support #region-us \n# Zephyr-7B-Beta Customer Support Chatbot\n\n\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.## Introduction\nWelcome to the 'zephyr-7b-beta-invoices' repository! This project leverages the Zephyr-7B-Beta model trained on the \"Bitext-Customer-Support-LLM-Chatbot-Training-Dataset\" to create a state-of-the-art customer support chatbot. Our goal is to provide an efficient and accurate chatbot for handling invoice-related queries.## Dataset Overview\nThe dataset used for training is 'bitext/Bitext-customer-support-llm-chatbot-training-dataset'. It's tailored for customer support scenarios, focusing on conversations related to invoice queries, clarifications, and resolutions.### Dataset Description\n\n\n\n\n- Forked by: Bitext Customer Support\n- Edited by: Erfan Varedi## Model Details\nzephyr-7b-beta is a cutting-edge language model specifically tuned for customer support interactions. It understands context, manages conversation flow, and provides accurate responses to invoice-related inquiries.## Installation and Setup\nTo use the chatbot, install the necessary packages:## Quick Start Guide\nTo quickly set up and run the chatbot, use the following script:## License\nThis project is licensed under the CDLA-Sharing-1.0 License.## Contact\nFor questions or collaboration, please reach out to me at Linkedin."
]
|
f2cb782c8c29eb2ce69099bbad67413299533f74 | # Dataset Card for "mnli-mock-contrastive-axes-ii"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | iamroot/mnli-mock-contrastive-axes-ii | [
"region:us"
]
| 2023-11-12T20:48:04+00:00 | {"dataset_info": {"features": [{"name": "label", "dtype": {"class_label": {"names": {"0": "entailment", "1": "neutral", "2": "contradiction"}}}}, {"name": "text_a", "dtype": "string"}, {"name": "text_b", "dtype": "string"}, {"name": "prompt", "dtype": "string"}, {"name": "text_a_embedding", "sequence": "float32"}, {"name": "text_b_embedding", "sequence": "float32"}, {"name": "prompt_embedding", "sequence": "float32"}], "splits": [{"name": "train", "num_bytes": 2892065589, "num_examples": 304513}], "download_size": 3435433919, "dataset_size": 2892065589}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2023-11-12T20:49:35+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "mnli-mock-contrastive-axes-ii"
More Information needed | [
"# Dataset Card for \"mnli-mock-contrastive-axes-ii\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"mnli-mock-contrastive-axes-ii\"\n\nMore Information needed"
]
| [
6,
24
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"mnli-mock-contrastive-axes-ii\"\n\nMore Information needed"
]
|
0682115a351da5efed751d712719688c2e937b64 | # Dataset Card for "seizure_detection_224x224_raw_frequency"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | JLB-JLB/seizure_detection_224x224_raw_frequency | [
"region:us"
]
| 2023-11-12T20:49:53+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}, {"split": "eval", "path": "data/eval-*"}, {"split": "test_bckg_events", "path": "data/test_bckg_events-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "epoch_index", "dtype": "int32"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "bckg", "1": "seiz"}}}}], "splits": [{"name": "train", "num_bytes": 2654825157.304, "num_examples": 93128}, {"name": "test", "num_bytes": 898252847.927854, "num_examples": 31384}, {"name": "eval", "num_bytes": 598524001.8931462, "num_examples": 20923}, {"name": "test_bckg_events", "num_bytes": 9520809814.634, "num_examples": 338634}], "download_size": 13707064997, "dataset_size": 13672411821.759}} | 2023-11-12T22:04:01+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "seizure_detection_224x224_raw_frequency"
More Information needed | [
"# Dataset Card for \"seizure_detection_224x224_raw_frequency\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"seizure_detection_224x224_raw_frequency\"\n\nMore Information needed"
]
| [
6,
28
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"seizure_detection_224x224_raw_frequency\"\n\nMore Information needed"
]
|
757c9648859e0c90b541aaecd30eeda5670ac4f0 | # Dataset Card for "ca_housing"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | jxie/ca_housing | [
"region:us"
]
| 2023-11-12T21:00:41+00:00 | {"dataset_info": {"features": [{"name": "inputs", "sequence": "float64"}, {"name": "label", "dtype": "float64"}], "splits": [{"name": "train", "num_bytes": 1003884, "num_examples": 13209}, {"name": "val", "num_bytes": 251028, "num_examples": 3303}, {"name": "test", "num_bytes": 313728, "num_examples": 4128}], "download_size": 1125679, "dataset_size": 1568640}} | 2023-11-12T21:00:46+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "ca_housing"
More Information needed | [
"# Dataset Card for \"ca_housing\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"ca_housing\"\n\nMore Information needed"
]
| [
6,
14
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"ca_housing\"\n\nMore Information needed"
]
|
d473532bea7754c49de8419e59529fcdc7b14b55 |
The animation was independently produced by Shinji Aramaki and his manga club during their time at Okayama University. The animation premiered at the URACON III sci-fi convention in 1984.
More information can be found on MyAnimeList https://myanimelist.net/anime/42390/Uracon_III_Opening_Animation
More caps can be found on our Tumblr https://capsekai.tumblr.com/ | Capsekai/Uracon | [
"task_categories:text-classification",
"size_categories:1K<n<10K",
"language:en",
"license:creativeml-openrail-m",
"art",
"region:us"
]
| 2023-11-12T21:28:55+00:00 | {"language": ["en"], "license": "creativeml-openrail-m", "size_categories": ["1K<n<10K"], "task_categories": ["text-classification"], "tags": ["art"]} | 2023-11-12T21:36:14+00:00 | []
| [
"en"
]
| TAGS
#task_categories-text-classification #size_categories-1K<n<10K #language-English #license-creativeml-openrail-m #art #region-us
|
The animation was independently produced by Shinji Aramaki and his manga club during their time at Okayama University. The animation premiered at the URACON III sci-fi convention in 1984.
More information can be found on MyAnimeList URL
More caps can be found on our Tumblr URL | []
| [
"TAGS\n#task_categories-text-classification #size_categories-1K<n<10K #language-English #license-creativeml-openrail-m #art #region-us \n"
]
| [
47
]
| [
"passage: TAGS\n#task_categories-text-classification #size_categories-1K<n<10K #language-English #license-creativeml-openrail-m #art #region-us \n"
]
|
7882fdb98c53a6a5ae982513928a32f6cdcd728f |
# Dataset Card for Evaluation run of PulsarAI/CollectiveCognition-v1.1-Nebula-7B
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/PulsarAI/CollectiveCognition-v1.1-Nebula-7B
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]
### Dataset Summary
Dataset automatically created during the evaluation run of model [PulsarAI/CollectiveCognition-v1.1-Nebula-7B](https://huggingface.co/PulsarAI/CollectiveCognition-v1.1-Nebula-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_PulsarAI__CollectiveCognition-v1.1-Nebula-7B_public",
"harness_winogrande_5",
split="train")
```
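The aggregated scores live in the separate "results" configuration mentioned above. A minimal sketch for pulling them, assuming the timestamped-split convention described in the summary (with "latest" pointing at the newest run) also applies to this configuration:
```python
from datasets import load_dataset

# "results" is the aggregated configuration described in the summary above;
# split="latest" assumes the timestamped-split convention applies here too.
results = load_dataset(
    "open-llm-leaderboard/details_PulsarAI__CollectiveCognition-v1.1-Nebula-7B_public",
    "results",
    split="latest",
)
print(results[0])
```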
## Latest results
These are the [latest results from run 2023-11-12T21:42:17.063541](https://huggingface.co/datasets/open-llm-leaderboard/details_PulsarAI__CollectiveCognition-v1.1-Nebula-7B_public/blob/main/results_2023-11-12T21-42-17.063541.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.5655902624582015,
"acc_stderr": 0.033540567370804734,
"acc_norm": 0.5747445580416879,
"acc_norm_stderr": 0.03431067576831402,
"mc1": 0.38555691554467564,
"mc1_stderr": 0.01703883901059167,
"mc2": 0.5353024010333743,
"mc2_stderr": 0.015743888224866397,
"em": 0.35675335570469796,
"em_stderr": 0.004905829488253491,
"f1": 0.4216977768456382,
"f1_stderr": 0.0047367493845716785
},
"harness|arc:challenge|25": {
"acc": 0.5324232081911263,
"acc_stderr": 0.014580637569995421,
"acc_norm": 0.5810580204778157,
"acc_norm_stderr": 0.014418106953639013
},
"harness|hellaswag|10": {
"acc": 0.6309500099581756,
"acc_stderr": 0.004815613144385404,
"acc_norm": 0.8239394542919737,
"acc_norm_stderr": 0.0038009327705977565
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.37,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.37,
"acc_norm_stderr": 0.04852365870939099
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.5555555555555556,
"acc_stderr": 0.04292596718256981,
"acc_norm": 0.5555555555555556,
"acc_norm_stderr": 0.04292596718256981
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.5986842105263158,
"acc_stderr": 0.03988903703336284,
"acc_norm": 0.5986842105263158,
"acc_norm_stderr": 0.03988903703336284
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.43,
"acc_stderr": 0.04975698519562428,
"acc_norm": 0.43,
"acc_norm_stderr": 0.04975698519562428
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.6188679245283019,
"acc_stderr": 0.029890609686286623,
"acc_norm": 0.6188679245283019,
"acc_norm_stderr": 0.029890609686286623
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.6319444444444444,
"acc_stderr": 0.040329990539607175,
"acc_norm": 0.6319444444444444,
"acc_norm_stderr": 0.040329990539607175
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.38,
"acc_stderr": 0.04878317312145632,
"acc_norm": 0.38,
"acc_norm_stderr": 0.04878317312145632
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.45,
"acc_stderr": 0.049999999999999996,
"acc_norm": 0.45,
"acc_norm_stderr": 0.049999999999999996
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.5433526011560693,
"acc_stderr": 0.03798106566014498,
"acc_norm": 0.5433526011560693,
"acc_norm_stderr": 0.03798106566014498
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.30392156862745096,
"acc_stderr": 0.04576665403207763,
"acc_norm": 0.30392156862745096,
"acc_norm_stderr": 0.04576665403207763
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.68,
"acc_stderr": 0.04688261722621505,
"acc_norm": 0.68,
"acc_norm_stderr": 0.04688261722621505
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.49361702127659574,
"acc_stderr": 0.03268335899936337,
"acc_norm": 0.49361702127659574,
"acc_norm_stderr": 0.03268335899936337
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.4473684210526316,
"acc_stderr": 0.04677473004491199,
"acc_norm": 0.4473684210526316,
"acc_norm_stderr": 0.04677473004491199
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5448275862068965,
"acc_stderr": 0.04149886942192117,
"acc_norm": 0.5448275862068965,
"acc_norm_stderr": 0.04149886942192117
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.3915343915343915,
"acc_stderr": 0.02513809138885108,
"acc_norm": 0.3915343915343915,
"acc_norm_stderr": 0.02513809138885108
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.38095238095238093,
"acc_stderr": 0.04343525428949098,
"acc_norm": 0.38095238095238093,
"acc_norm_stderr": 0.04343525428949098
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695236,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695236
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.6483870967741936,
"acc_stderr": 0.027162537826948458,
"acc_norm": 0.6483870967741936,
"acc_norm_stderr": 0.027162537826948458
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.45320197044334976,
"acc_stderr": 0.03502544650845872,
"acc_norm": 0.45320197044334976,
"acc_norm_stderr": 0.03502544650845872
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.57,
"acc_stderr": 0.04975698519562428,
"acc_norm": 0.57,
"acc_norm_stderr": 0.04975698519562428
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7333333333333333,
"acc_stderr": 0.03453131801885417,
"acc_norm": 0.7333333333333333,
"acc_norm_stderr": 0.03453131801885417
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7676767676767676,
"acc_stderr": 0.030088629490217487,
"acc_norm": 0.7676767676767676,
"acc_norm_stderr": 0.030088629490217487
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8238341968911918,
"acc_stderr": 0.02749350424454806,
"acc_norm": 0.8238341968911918,
"acc_norm_stderr": 0.02749350424454806
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.5615384615384615,
"acc_stderr": 0.025158266016868592,
"acc_norm": 0.5615384615384615,
"acc_norm_stderr": 0.025158266016868592
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.2740740740740741,
"acc_stderr": 0.027195934804085626,
"acc_norm": 0.2740740740740741,
"acc_norm_stderr": 0.027195934804085626
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.5588235294117647,
"acc_stderr": 0.0322529423239964,
"acc_norm": 0.5588235294117647,
"acc_norm_stderr": 0.0322529423239964
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.3443708609271523,
"acc_stderr": 0.038796870240733264,
"acc_norm": 0.3443708609271523,
"acc_norm_stderr": 0.038796870240733264
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.7614678899082569,
"acc_stderr": 0.018272575810231867,
"acc_norm": 0.7614678899082569,
"acc_norm_stderr": 0.018272575810231867
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.39351851851851855,
"acc_stderr": 0.03331747876370312,
"acc_norm": 0.39351851851851855,
"acc_norm_stderr": 0.03331747876370312
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.7205882352941176,
"acc_stderr": 0.03149328104507957,
"acc_norm": 0.7205882352941176,
"acc_norm_stderr": 0.03149328104507957
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.729957805907173,
"acc_stderr": 0.028900721906293426,
"acc_norm": 0.729957805907173,
"acc_norm_stderr": 0.028900721906293426
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6681614349775785,
"acc_stderr": 0.03160295143776679,
"acc_norm": 0.6681614349775785,
"acc_norm_stderr": 0.03160295143776679
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.6564885496183206,
"acc_stderr": 0.041649760719448786,
"acc_norm": 0.6564885496183206,
"acc_norm_stderr": 0.041649760719448786
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7603305785123967,
"acc_stderr": 0.03896878985070417,
"acc_norm": 0.7603305785123967,
"acc_norm_stderr": 0.03896878985070417
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.6944444444444444,
"acc_stderr": 0.044531975073749834,
"acc_norm": 0.6944444444444444,
"acc_norm_stderr": 0.044531975073749834
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.6871165644171779,
"acc_stderr": 0.036429145782924055,
"acc_norm": 0.6871165644171779,
"acc_norm_stderr": 0.036429145782924055
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.36607142857142855,
"acc_stderr": 0.0457237235873743,
"acc_norm": 0.36607142857142855,
"acc_norm_stderr": 0.0457237235873743
},
"harness|hendrycksTest-management|5": {
"acc": 0.7378640776699029,
"acc_stderr": 0.04354631077260597,
"acc_norm": 0.7378640776699029,
"acc_norm_stderr": 0.04354631077260597
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.811965811965812,
"acc_stderr": 0.025598193686652265,
"acc_norm": 0.811965811965812,
"acc_norm_stderr": 0.025598193686652265
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.66,
"acc_stderr": 0.04760952285695237,
"acc_norm": 0.66,
"acc_norm_stderr": 0.04760952285695237
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.7713920817369093,
"acc_stderr": 0.015016884698539892,
"acc_norm": 0.7713920817369093,
"acc_norm_stderr": 0.015016884698539892
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.6184971098265896,
"acc_stderr": 0.0261521986197268,
"acc_norm": 0.6184971098265896,
"acc_norm_stderr": 0.0261521986197268
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.22793296089385476,
"acc_stderr": 0.014030149950805098,
"acc_norm": 0.22793296089385476,
"acc_norm_stderr": 0.014030149950805098
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.6405228758169934,
"acc_stderr": 0.027475969910660952,
"acc_norm": 0.6405228758169934,
"acc_norm_stderr": 0.027475969910660952
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.6334405144694534,
"acc_stderr": 0.027368078243971646,
"acc_norm": 0.6334405144694534,
"acc_norm_stderr": 0.027368078243971646
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.6820987654320988,
"acc_stderr": 0.02591006352824088,
"acc_norm": 0.6820987654320988,
"acc_norm_stderr": 0.02591006352824088
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.42907801418439717,
"acc_stderr": 0.02952591430255856,
"acc_norm": 0.42907801418439717,
"acc_norm_stderr": 0.02952591430255856
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.4315514993481095,
"acc_stderr": 0.012650007999463888,
"acc_norm": 0.4315514993481095,
"acc_norm_stderr": 0.012650007999463888
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.5257352941176471,
"acc_stderr": 0.030332578094555033,
"acc_norm": 0.5257352941176471,
"acc_norm_stderr": 0.030332578094555033
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6078431372549019,
"acc_stderr": 0.019751726508762637,
"acc_norm": 0.6078431372549019,
"acc_norm_stderr": 0.019751726508762637
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6545454545454545,
"acc_stderr": 0.04554619617541054,
"acc_norm": 0.6545454545454545,
"acc_norm_stderr": 0.04554619617541054
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.5755102040816327,
"acc_stderr": 0.031642094879429414,
"acc_norm": 0.5755102040816327,
"acc_norm_stderr": 0.031642094879429414
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.7512437810945274,
"acc_stderr": 0.030567675938916718,
"acc_norm": 0.7512437810945274,
"acc_norm_stderr": 0.030567675938916718
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.8,
"acc_stderr": 0.04020151261036845,
"acc_norm": 0.8,
"acc_norm_stderr": 0.04020151261036845
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5180722891566265,
"acc_stderr": 0.038899512528272166,
"acc_norm": 0.5180722891566265,
"acc_norm_stderr": 0.038899512528272166
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.783625730994152,
"acc_stderr": 0.03158149539338734,
"acc_norm": 0.783625730994152,
"acc_norm_stderr": 0.03158149539338734
},
"harness|truthfulqa:mc|0": {
"mc1": 0.38555691554467564,
"mc1_stderr": 0.01703883901059167,
"mc2": 0.5353024010333743,
"mc2_stderr": 0.015743888224866397
},
"harness|winogrande|5": {
"acc": 0.7371744277821626,
"acc_stderr": 0.012370922527262008
},
"harness|drop|3": {
"em": 0.35675335570469796,
"em_stderr": 0.004905829488253491,
"f1": 0.4216977768456382,
"f1_stderr": 0.0047367493845716785
},
"harness|gsm8k|5": {
"acc": 0.09552691432903715,
"acc_stderr": 0.008096605771155759
}
}
```
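To see which tasks drive the averages, the block above can be post-processed locally. This is a small sketch, assuming you saved the dict exactly as printed to a file named `results.json` (the filename and the flat top-level layout are assumptions, not part of the official tooling):
```python
import json

# Hypothetical local copy of the results block printed above.
with open("results.json") as f:
    results = json.load(f)

# Collect one headline score per task, preferring normalized accuracy.
scores = {
    task: m.get("acc_norm", m.get("acc"))
    for task, m in results.items()
    if task != "all" and ("acc_norm" in m or "acc" in m)
}

# Show the five strongest and five weakest tasks.
ranked = sorted(scores.items(), key=lambda kv: kv[1], reverse=True)
for task, score in ranked[:5] + ranked[-5:]:
    print(f"{score:.3f}  {task}")
```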
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] | open-llm-leaderboard/details_PulsarAI__CollectiveCognition-v1.1-Nebula-7B | [
"region:us"
]
| 2023-11-12T21:45:18+00:00 | {"pretty_name": "Evaluation run of PulsarAI/CollectiveCognition-v1.1-Nebula-7B", "dataset_summary": "Dataset automatically created during the evaluation run of model [PulsarAI/CollectiveCognition-v1.1-Nebula-7B](https://huggingface.co/PulsarAI/CollectiveCognition-v1.1-Nebula-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_PulsarAI__CollectiveCognition-v1.1-Nebula-7B_public\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-11-12T21:42:17.063541](https://huggingface.co/datasets/open-llm-leaderboard/details_PulsarAI__CollectiveCognition-v1.1-Nebula-7B_public/blob/main/results_2023-11-12T21-42-17.063541.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5655902624582015,\n \"acc_stderr\": 0.033540567370804734,\n \"acc_norm\": 0.5747445580416879,\n \"acc_norm_stderr\": 0.03431067576831402,\n \"mc1\": 0.38555691554467564,\n \"mc1_stderr\": 0.01703883901059167,\n \"mc2\": 0.5353024010333743,\n \"mc2_stderr\": 0.015743888224866397,\n \"em\": 0.35675335570469796,\n \"em_stderr\": 0.004905829488253491,\n \"f1\": 0.4216977768456382,\n \"f1_stderr\": 0.0047367493845716785\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5324232081911263,\n \"acc_stderr\": 0.014580637569995421,\n \"acc_norm\": 0.5810580204778157,\n \"acc_norm_stderr\": 0.014418106953639013\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6309500099581756,\n \"acc_stderr\": 0.004815613144385404,\n \"acc_norm\": 0.8239394542919737,\n \"acc_norm_stderr\": 0.0038009327705977565\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5555555555555556,\n \"acc_stderr\": 0.04292596718256981,\n \"acc_norm\": 0.5555555555555556,\n \"acc_norm_stderr\": 0.04292596718256981\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.5986842105263158,\n \"acc_stderr\": 0.03988903703336284,\n \"acc_norm\": 0.5986842105263158,\n \"acc_norm_stderr\": 0.03988903703336284\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6188679245283019,\n \"acc_stderr\": 0.029890609686286623,\n \"acc_norm\": 0.6188679245283019,\n \"acc_norm_stderr\": 0.029890609686286623\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 
0.6319444444444444,\n \"acc_stderr\": 0.040329990539607175,\n \"acc_norm\": 0.6319444444444444,\n \"acc_norm_stderr\": 0.040329990539607175\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145632,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145632\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.049999999999999996,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.049999999999999996\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5433526011560693,\n \"acc_stderr\": 0.03798106566014498,\n \"acc_norm\": 0.5433526011560693,\n \"acc_norm_stderr\": 0.03798106566014498\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.30392156862745096,\n \"acc_stderr\": 0.04576665403207763,\n \"acc_norm\": 0.30392156862745096,\n \"acc_norm_stderr\": 0.04576665403207763\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.49361702127659574,\n \"acc_stderr\": 0.03268335899936337,\n \"acc_norm\": 0.49361702127659574,\n \"acc_norm_stderr\": 0.03268335899936337\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4473684210526316,\n \"acc_stderr\": 0.04677473004491199,\n \"acc_norm\": 0.4473684210526316,\n \"acc_norm_stderr\": 0.04677473004491199\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5448275862068965,\n \"acc_stderr\": 0.04149886942192117,\n \"acc_norm\": 0.5448275862068965,\n \"acc_norm_stderr\": 0.04149886942192117\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3915343915343915,\n \"acc_stderr\": 0.02513809138885108,\n \"acc_norm\": 0.3915343915343915,\n \"acc_norm_stderr\": 0.02513809138885108\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.38095238095238093,\n \"acc_stderr\": 0.04343525428949098,\n \"acc_norm\": 0.38095238095238093,\n \"acc_norm_stderr\": 0.04343525428949098\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6483870967741936,\n \"acc_stderr\": 0.027162537826948458,\n \"acc_norm\": 0.6483870967741936,\n \"acc_norm_stderr\": 0.027162537826948458\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.45320197044334976,\n \"acc_stderr\": 0.03502544650845872,\n \"acc_norm\": 0.45320197044334976,\n \"acc_norm_stderr\": 0.03502544650845872\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7333333333333333,\n \"acc_stderr\": 0.03453131801885417,\n \"acc_norm\": 0.7333333333333333,\n \"acc_norm_stderr\": 0.03453131801885417\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7676767676767676,\n \"acc_stderr\": 0.030088629490217487,\n \"acc_norm\": 0.7676767676767676,\n \"acc_norm_stderr\": 0.030088629490217487\n },\n 
\"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8238341968911918,\n \"acc_stderr\": 0.02749350424454806,\n \"acc_norm\": 0.8238341968911918,\n \"acc_norm_stderr\": 0.02749350424454806\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5615384615384615,\n \"acc_stderr\": 0.025158266016868592,\n \"acc_norm\": 0.5615384615384615,\n \"acc_norm_stderr\": 0.025158266016868592\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2740740740740741,\n \"acc_stderr\": 0.027195934804085626,\n \"acc_norm\": 0.2740740740740741,\n \"acc_norm_stderr\": 0.027195934804085626\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.5588235294117647,\n \"acc_stderr\": 0.0322529423239964,\n \"acc_norm\": 0.5588235294117647,\n \"acc_norm_stderr\": 0.0322529423239964\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3443708609271523,\n \"acc_stderr\": 0.038796870240733264,\n \"acc_norm\": 0.3443708609271523,\n \"acc_norm_stderr\": 0.038796870240733264\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7614678899082569,\n \"acc_stderr\": 0.018272575810231867,\n \"acc_norm\": 0.7614678899082569,\n \"acc_norm_stderr\": 0.018272575810231867\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.39351851851851855,\n \"acc_stderr\": 0.03331747876370312,\n \"acc_norm\": 0.39351851851851855,\n \"acc_norm_stderr\": 0.03331747876370312\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7205882352941176,\n \"acc_stderr\": 0.03149328104507957,\n \"acc_norm\": 0.7205882352941176,\n \"acc_norm_stderr\": 0.03149328104507957\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.729957805907173,\n \"acc_stderr\": 0.028900721906293426,\n \"acc_norm\": 0.729957805907173,\n \"acc_norm_stderr\": 0.028900721906293426\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6681614349775785,\n \"acc_stderr\": 0.03160295143776679,\n \"acc_norm\": 0.6681614349775785,\n \"acc_norm_stderr\": 0.03160295143776679\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6564885496183206,\n \"acc_stderr\": 0.041649760719448786,\n \"acc_norm\": 0.6564885496183206,\n \"acc_norm_stderr\": 0.041649760719448786\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7603305785123967,\n \"acc_stderr\": 0.03896878985070417,\n \"acc_norm\": 0.7603305785123967,\n \"acc_norm_stderr\": 0.03896878985070417\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6944444444444444,\n \"acc_stderr\": 0.044531975073749834,\n \"acc_norm\": 0.6944444444444444,\n \"acc_norm_stderr\": 0.044531975073749834\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6871165644171779,\n \"acc_stderr\": 0.036429145782924055,\n \"acc_norm\": 0.6871165644171779,\n \"acc_norm_stderr\": 0.036429145782924055\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.36607142857142855,\n \"acc_stderr\": 0.0457237235873743,\n \"acc_norm\": 0.36607142857142855,\n \"acc_norm_stderr\": 0.0457237235873743\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7378640776699029,\n \"acc_stderr\": 0.04354631077260597,\n \"acc_norm\": 0.7378640776699029,\n \"acc_norm_stderr\": 0.04354631077260597\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.811965811965812,\n \"acc_stderr\": 0.025598193686652265,\n \"acc_norm\": 0.811965811965812,\n \"acc_norm_stderr\": 0.025598193686652265\n },\n 
\"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.66,\n \"acc_stderr\": 0.04760952285695237,\n \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 0.04760952285695237\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7713920817369093,\n \"acc_stderr\": 0.015016884698539892,\n \"acc_norm\": 0.7713920817369093,\n \"acc_norm_stderr\": 0.015016884698539892\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6184971098265896,\n \"acc_stderr\": 0.0261521986197268,\n \"acc_norm\": 0.6184971098265896,\n \"acc_norm_stderr\": 0.0261521986197268\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.22793296089385476,\n \"acc_stderr\": 0.014030149950805098,\n \"acc_norm\": 0.22793296089385476,\n \"acc_norm_stderr\": 0.014030149950805098\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6405228758169934,\n \"acc_stderr\": 0.027475969910660952,\n \"acc_norm\": 0.6405228758169934,\n \"acc_norm_stderr\": 0.027475969910660952\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6334405144694534,\n \"acc_stderr\": 0.027368078243971646,\n \"acc_norm\": 0.6334405144694534,\n \"acc_norm_stderr\": 0.027368078243971646\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6820987654320988,\n \"acc_stderr\": 0.02591006352824088,\n \"acc_norm\": 0.6820987654320988,\n \"acc_norm_stderr\": 0.02591006352824088\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.42907801418439717,\n \"acc_stderr\": 0.02952591430255856,\n \"acc_norm\": 0.42907801418439717,\n \"acc_norm_stderr\": 0.02952591430255856\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4315514993481095,\n \"acc_stderr\": 0.012650007999463888,\n \"acc_norm\": 0.4315514993481095,\n \"acc_norm_stderr\": 0.012650007999463888\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5257352941176471,\n \"acc_stderr\": 0.030332578094555033,\n \"acc_norm\": 0.5257352941176471,\n \"acc_norm_stderr\": 0.030332578094555033\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6078431372549019,\n \"acc_stderr\": 0.019751726508762637,\n \"acc_norm\": 0.6078431372549019,\n \"acc_norm_stderr\": 0.019751726508762637\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6545454545454545,\n \"acc_stderr\": 0.04554619617541054,\n \"acc_norm\": 0.6545454545454545,\n \"acc_norm_stderr\": 0.04554619617541054\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.5755102040816327,\n \"acc_stderr\": 0.031642094879429414,\n \"acc_norm\": 0.5755102040816327,\n \"acc_norm_stderr\": 0.031642094879429414\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7512437810945274,\n \"acc_stderr\": 0.030567675938916718,\n \"acc_norm\": 0.7512437810945274,\n \"acc_norm_stderr\": 0.030567675938916718\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036845,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036845\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5180722891566265,\n \"acc_stderr\": 0.038899512528272166,\n \"acc_norm\": 0.5180722891566265,\n \"acc_norm_stderr\": 0.038899512528272166\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.783625730994152,\n \"acc_stderr\": 0.03158149539338734,\n \"acc_norm\": 0.783625730994152,\n \"acc_norm_stderr\": 0.03158149539338734\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.38555691554467564,\n \"mc1_stderr\": 0.01703883901059167,\n \"mc2\": 0.5353024010333743,\n 
\"mc2_stderr\": 0.015743888224866397\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7371744277821626,\n \"acc_stderr\": 0.012370922527262008\n },\n \"harness|drop|3\": {\n \"em\": 0.35675335570469796,\n \"em_stderr\": 0.004905829488253491,\n \"f1\": 0.4216977768456382,\n \"f1_stderr\": 0.0047367493845716785\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.09552691432903715,\n \"acc_stderr\": 0.008096605771155759\n }\n}\n```", "repo_url": "https://huggingface.co/PulsarAI/CollectiveCognition-v1.1-Nebula-7B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|arc:challenge|25_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|drop|3_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|gsm8k|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hellaswag|10_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-11-12T21-42-17.063541.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-management|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-virology|5_2023-11-12T21-42-17.063541.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T21-42-17.063541.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-management|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-virology|5_2023-11-12T21-42-17.063541.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": 
["**/details_harness|hendrycksTest-college_biology|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": 
["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": 
["**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-management|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|hendrycksTest-virology|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": 
["**/details_harness|hendrycksTest-world_religions|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|truthfulqa:mc|0_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["**/details_harness|winogrande|5_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-11-12T21-42-17.063541.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_11_12T21_42_17.063541", "path": ["results_2023-11-12T21-42-17.063541.parquet"]}, {"split": "latest", "path": ["results_2023-11-12T21-42-17.063541.parquet"]}]}]} | 2023-11-12T21:46:18+00:00 | []
| []
| TAGS
#region-us
|
# Dataset Card for Evaluation run of PulsarAI/CollectiveCognition-v1.1-Nebula-7B
## Dataset Description
- Homepage:
- Repository: URL
- Paper:
- Leaderboard: URL
- Point of Contact: clementine@URL
### Dataset Summary
Dataset automatically created during the evaluation run of model PulsarAI/CollectiveCognition-v1.1-Nebula-7B on the Open LLM Leaderboard.
The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).
To load the details from a run, you can for instance do the following:
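A minimal sketch with the `datasets` library (the repository name below is an assumption, following the leaderboard's usual `details_<org>__<model>` naming; any config name listed in this card's metadata works):

```python
from datasets import load_dataset

# Assumed repository name, following the Open LLM Leaderboard's usual naming scheme
data = load_dataset(
    "open-llm-leaderboard/details_PulsarAI__CollectiveCognition-v1.1-Nebula-7B",
    "harness_winogrande_5",  # one of the config names listed in this card
    split="train",
)
```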
## Latest results
These are the latest results from run 2023-11-12T21:42:17.063541 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
### Supported Tasks and Leaderboards
### Languages
## Dataset Structure
### Data Instances
### Data Fields
### Data Splits
## Dataset Creation
### Curation Rationale
### Source Data
#### Initial Data Collection and Normalization
#### Who are the source language producers?
### Annotations
#### Annotation process
#### Who are the annotators?
### Personal and Sensitive Information
## Considerations for Using the Data
### Social Impact of Dataset
### Discussion of Biases
### Other Known Limitations
## Additional Information
### Dataset Curators
### Licensing Information
### Contributions
| [
"# Dataset Card for Evaluation run of PulsarAI/CollectiveCognition-v1.1-Nebula-7B",
"## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL",
"### Dataset Summary\n\nDataset automatically created during the evaluation run of model PulsarAI/CollectiveCognition-v1.1-Nebula-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:",
"## Latest results\n\nThese are the latest results from run 2023-11-12T21:42:17.063541(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):",
"### Supported Tasks and Leaderboards",
"### Languages",
"## Dataset Structure",
"### Data Instances",
"### Data Fields",
"### Data Splits",
"## Dataset Creation",
"### Curation Rationale",
"### Source Data",
"#### Initial Data Collection and Normalization",
"#### Who are the source language producers?",
"### Annotations",
"#### Annotation process",
"#### Who are the annotators?",
"### Personal and Sensitive Information",
"## Considerations for Using the Data",
"### Social Impact of Dataset",
"### Discussion of Biases",
"### Other Known Limitations",
"## Additional Information",
"### Dataset Curators",
"### Licensing Information",
"### Contributions"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for Evaluation run of PulsarAI/CollectiveCognition-v1.1-Nebula-7B",
"## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL",
"### Dataset Summary\n\nDataset automatically created during the evaluation run of model PulsarAI/CollectiveCognition-v1.1-Nebula-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:",
"## Latest results\n\nThese are the latest results from run 2023-11-12T21:42:17.063541(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):",
"### Supported Tasks and Leaderboards",
"### Languages",
"## Dataset Structure",
"### Data Instances",
"### Data Fields",
"### Data Splits",
"## Dataset Creation",
"### Curation Rationale",
"### Source Data",
"#### Initial Data Collection and Normalization",
"#### Who are the source language producers?",
"### Annotations",
"#### Annotation process",
"#### Who are the annotators?",
"### Personal and Sensitive Information",
"## Considerations for Using the Data",
"### Social Impact of Dataset",
"### Discussion of Biases",
"### Other Known Limitations",
"## Additional Information",
"### Dataset Curators",
"### Licensing Information",
"### Contributions"
]
| [
6,
27,
31,
176,
67,
10,
4,
6,
6,
5,
5,
5,
7,
4,
10,
10,
5,
5,
9,
8,
8,
7,
8,
7,
5,
6,
6,
5
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of PulsarAI/CollectiveCognition-v1.1-Nebula-7B## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model PulsarAI/CollectiveCognition-v1.1-Nebula-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-11-12T21:42:17.063541(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions"
]
|
4dd0ae1d1c41a5898804ac7599893885e03cce39 | # Dataset Card for "pltcdom"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | minoruskore/pltcdom | [
"region:us"
]
| 2023-11-12T21:49:35+00:00 | {"dataset_info": {"features": [{"name": "imagen", "dtype": "binary"}, {"name": "etiquetas", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 81445051, "num_examples": 495}], "download_size": 81221490, "dataset_size": 81445051}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2023-11-12T22:04:43+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "pltcdom"
More Information needed | [
"# Dataset Card for \"pltcdom\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"pltcdom\"\n\nMore Information needed"
]
| [
6,
14
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"pltcdom\"\n\nMore Information needed"
]
|
a9789448ffd2a8cbd89c12bd846eded11e7d22d9 |
Shiro No Kiseki is a lesser-known anime centered around the subject of the Shinsengumi.
Capped by https://capsekai.tumblr.com/ | Capsekai/ShironoKiseki | [
"task_categories:text-classification",
"size_categories:n<1K",
"language:en",
"license:creativeml-openrail-m",
"anime",
"art",
"region:us"
]
| 2023-11-12T21:51:23+00:00 | {"language": ["en"], "license": "creativeml-openrail-m", "size_categories": ["n<1K"], "task_categories": ["text-classification"], "pretty_name": "Shiro No Kiseki", "tags": ["anime", "art"]} | 2023-11-12T21:56:28+00:00 | []
| [
"en"
]
| TAGS
#task_categories-text-classification #size_categories-n<1K #language-English #license-creativeml-openrail-m #anime #art #region-us
|
Shiro No Kiseki is a lesser-known anime centered around the subject of the Shinsengumi.
Capped by URL | []
| [
"TAGS\n#task_categories-text-classification #size_categories-n<1K #language-English #license-creativeml-openrail-m #anime #art #region-us \n"
]
| [
48
]
| [
"passage: TAGS\n#task_categories-text-classification #size_categories-n<1K #language-English #license-creativeml-openrail-m #anime #art #region-us \n"
]
|
69f552e9d4f5e9ffd51211a044995f7b94329aa6 | # Dataset Card for "DIOR-RSVG"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | danielz01/DIOR-RSVG | [
"region:us"
]
| 2023-11-12T21:57:37+00:00 | {"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "path", "dtype": "string"}, {"name": "objects", "struct": [{"name": "bbox", "sequence": {"sequence": "int32"}}, {"name": "captions", "sequence": "string"}, {"name": "categories", "sequence": "string"}, {"name": "categories_normalized", "sequence": "string"}]}], "splits": [{"name": "train", "num_bytes": 4808350968.376, "num_examples": 14748}, {"name": "val", "num_bytes": 1170343155.264, "num_examples": 3457}, {"name": "test", "num_bytes": 2083503162.625, "num_examples": 6125}], "download_size": 7863851320, "dataset_size": 8062197286.265}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "val", "path": "data/val-*"}, {"split": "test", "path": "data/test-*"}]}]} | 2023-11-12T22:08:29+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "DIOR-RSVG"
More Information needed | [
"# Dataset Card for \"DIOR-RSVG\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"DIOR-RSVG\"\n\nMore Information needed"
]
| [
6,
15
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"DIOR-RSVG\"\n\nMore Information needed"
]
|
f6f3d8b32f30bae22fa1606350459d94adcff8af | # Dataset Card for "hacker_news_prompt_completion"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | higgsfield/hacker_news_prompt_completion | [
"region:us"
]
| 2023-11-12T22:36:36+00:00 | {"dataset_info": {"features": [{"name": "prompt", "dtype": "string"}, {"name": "completion", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 187231365, "num_examples": 100000}], "download_size": 77649586, "dataset_size": 187231365}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2023-11-12T22:41:05+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "hacker_news_prompt_completion"
More Information needed | [
"# Dataset Card for \"hacker_news_prompt_completion\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"hacker_news_prompt_completion\"\n\nMore Information needed"
]
| [
6,
22
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"hacker_news_prompt_completion\"\n\nMore Information needed"
]
|
d23542892a9a5f8811a8fa6ca0d6d0b260737581 |
# 🙋🏻♂️Welcome to 🧑🏻🚀Tonic's🚀🚰Easy🔴Reddit🔥!

This is every record from "reddit_question_best_answers" (questions with their best answers), appended and produced according to the following template:
```json
{"prompt": "This is the first prompt", "completion": "This is the first completion"}
{"prompt": "This is the second prompt", "completion": "This is the second completion"}
```

- 🌟 You can use it in shards or all together !
- 🌟 This dataset is **internally consistent** !
🤔The point is to make it easy to train models with a single correctly formatted dataset of
- **54,367,153 rows**
# Original Dataset :
[nreimers/reddit_question_best_answers](https://huggingface.co/datasets/nreimers/reddit_question_best_answers)
# How To Use :
Combine random shards in random quantities to produce a high-quality conversational training dataset for fine-tuning, or combine rows line by line to save memory by running the following code:
```python
# see selectbyline.py
import os
import random

# Directory containing the shard JSONL files
shard_directory = "/path/to/shard/directory"

# Get a list of all JSONL files in the directory
shard_files = [f for f in os.listdir(shard_directory) if f.endswith('.jsonl')]

# Function to read a random number of lines (between min_lines and max_lines) from a file
def read_random_lines(filename, min_lines, max_lines):
    num_lines = random.randint(min_lines, max_lines)
    with open(filename, 'r') as file:
        lines = list(file)
    # If the file has fewer lines than requested, return them all
    if len(lines) <= num_lines:
        return lines
    return random.sample(lines, num_lines)

# Function to combine shards
def combine_shards(output_filename, num_combinations):
    with open(output_filename, 'w') as output_file:
        for _ in range(num_combinations):
            selected_shard_file = random.choice(shard_files)
            lines = read_random_lines(os.path.join(shard_directory, selected_shard_file), 5000, 10000)
            output_file.writelines(lines)

# Example usage
combine_shards("/path/to/output/combined_shards.jsonl", 10)
```
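To sanity-check the result, one option is to load the combined file with the `datasets` library (a sketch; the path is the hypothetical output from the example above):

```python
from datasets import load_dataset

# Load the combined JSONL produced by combine_shards (hypothetical path from above)
combined = load_dataset("json", data_files="/path/to/output/combined_shards.jsonl", split="train")
print(combined)                # row count plus the prompt/completion columns
print(combined[0]["prompt"])   # spot-check a single example
```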
# Pre-Processing
```python
import json
import os
import gzip
import logging
import re
import random

# Setup basic logging
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")

def clean_string(s):
    """Remove special characters, keeping only alphanumeric characters and spaces."""
    if isinstance(s, list):
        # Extract text from each dictionary in the list and join into a single string
        s = " ".join([d.get("body", "") if isinstance(d, dict) else str(d) for d in s])
    return re.sub(r'[^A-Za-z0-9 ]+', '', s)

def process_file(input_file, output_file):
    try:
        dataset = []
        with gzip.open(input_file, 'rt') as infile:
            for line in infile:
                # Parse the JSON line
                try:
                    data = json.loads(line)
                except json.JSONDecodeError:
                    logging.error(f"Invalid JSON format in {input_file}: {line}")
                    continue

                # Extract and clean the 'body' and 'answers' fields
                prompt = clean_string(data.get("body", ""))
                completion = clean_string(data.get("answers", ""))

                # For each body found, make a new row and duplicate the completion for it
                if isinstance(data.get("body", ""), list):
                    for body in data.get("body", []):
                        cleaned_body = clean_string(body)
                        dataset.append({"prompt": cleaned_body, "completion": completion})
                else:
                    dataset.append({"prompt": prompt, "completion": completion})

        # Shuffle the dataset
        random.shuffle(dataset)

        # Write the shuffled dataset to the output file
        with open(output_file, 'a') as outfile:
            for item in dataset:
                json.dump(item, outfile)
                outfile.write('\n')

        logging.info(f"Processed file: {input_file}")
    except Exception as e:
        logging.error(f"Error processing file {input_file}: {e}")

def process_files(file_list, output_dir):
    # Ensure the output directory exists
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # Create a single output file path
    output_file = os.path.join(output_dir, 'synthesized_dataset.jsonl')
    for input_file in file_list:
        process_file(input_file, output_file)

# Update with your list of .gz file paths
file_list = [r'C:\Users\MeMyself\FILES', r"C:\Users\MeMyself\FILES"]
output_dir = r'C:\Users\MeMyself\reddit_question_best_answers\processed'
process_files(file_list, output_dir)
```
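For intuition, a quick check of `clean_string` on the shapes the script expects (hypothetical values; run after the definitions above):

```python
# Hypothetical inputs, shaped like the 'body' and 'answers' fields the script reads
print(clean_string("What's the best way to learn Python?"))
# -> 'Whats the best way to learn Python'
print(clean_string([{"body": "Start with the official tutorial!"}, "Good luck."]))
# -> 'Start with the official tutorial Good luck'
```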
#### **sharding script** :
```python
import json
import os

def read_dataset(file_path):
    try:
        with open(file_path, 'r') as file:
            data = [json.loads(line) for line in file]
        print(f"Dataset loaded successfully from {file_path}.")
        return data
    except Exception as e:
        print(f"Error reading dataset from {file_path}: {e}")
        return []

def shard_dataset(dataset, num_shards):
    shard_size = len(dataset) // num_shards
    shards = [dataset[i:i + shard_size] for i in range(0, len(dataset), shard_size)]
    # Fold any leftover partial shard into the last full shard
    if len(shards) > num_shards:
        shards[num_shards - 1].extend(shards.pop())
    print(f"Dataset sharded into {num_shards} parts.")
    return shards

def write_shards(shards, output_dir):
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
        print(f"Created output directory at {output_dir}.")
    for i, shard in enumerate(shards):
        shard_file = os.path.join(output_dir, f'shard_{i+1}.jsonl')
        with open(shard_file, 'w') as file:
            for item in shard:
                json.dump(item, file)
                file.write('\n')
        print(f"Shard {i+1} written to {shard_file}.")

def main():
    input_file = 'path_to_processed_dataset.jsonl'  # Update with your processed dataset file path
    output_dir = 'sharded_dataset'  # Update with your output directory for shards
    num_shards = 33

    dataset = read_dataset(input_file)
    if dataset:
        shards = shard_dataset(dataset, num_shards)
        write_shards(shards, output_dir)
        print("All shards have been successfully written.")
    else:
        print("No dataset to process.")

if __name__ == "__main__":
    main()
```
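A quick optional sanity check that no rows were lost while sharding (assumes the paths used in the script above):

```python
import os

# Sum the line counts of all shards and compare against the original file
total = 0
for name in sorted(os.listdir('sharded_dataset')):
    if name.endswith('.jsonl'):
        with open(os.path.join('sharded_dataset', name)) as f:
            total += sum(1 for _ in f)

with open('path_to_processed_dataset.jsonl') as f:
    original = sum(1 for _ in f)

print(total, original, total == original)
```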
### Disclaimer :
🌟Re-format this dataset before use.
🌟Probably there's a **big problem with the token count** on these long answers 😉
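One way to work around it is to drop over-long rows before training; a rough sketch using a whitespace token estimate (swap in your model's tokenizer for real counts; file names are placeholders):

```python
import json

MAX_TOKENS = 2048  # assumed budget; adjust for your model

def rough_token_count(text):
    # Crude whitespace proxy; replace with your model's tokenizer for accuracy
    return len(text.split())

with open('combined_shards.jsonl') as src, open('filtered.jsonl', 'w') as dst:
    for line in src:
        row = json.loads(line)
        if rough_token_count(row['prompt']) + rough_token_count(row['completion']) <= MAX_TOKENS:
            dst.write(line)
```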
🌟**Good Luck !** 🧑🏻🚀🚀 | Tonic/EasyReddit | [
"size_categories:10M<n<100M",
"language:en",
"license:mit",
"not-for-all-audiences",
"chemistry",
"biology",
"finance",
"legal",
"music",
"art",
"code",
"climate",
"medical",
"region:us"
]
| 2023-11-12T23:18:16+00:00 | {"language": ["en"], "license": "mit", "size_categories": ["10M<n<100M"], "pretty_name": "Easy Reddit", "tags": ["not-for-all-audiences", "chemistry", "biology", "finance", "legal", "music", "art", "code", "climate", "medical"], "configs": [{"config_name": "shards", "data_files": [{"split": "train", "path": ["shard_1.jsonl", "shard_2.jsonl", "shard_3.jsonl", "shard_4.jsonl", "shard_5.jsonl", "shard_6.jsonl", "shard_7.jsonl", "shard_8.jsonl", "shard_9.jsonl", "shard_10.jsonl", "shard_11.jsonl", "shard_12.jsonl", "shard_13.jsonl", "shard_14.jsonl", "shard_15.jsonl", "shard_16.jsonl", "shard_17.jsonl", "shard_18.jsonl", "shard_19.jsonl", "shard_20.jsonl", "shard_21.jsonl", "shard_22.jsonl", "shard_23.jsonl", "shard_24.jsonl", "shard_25.jsonl", "shard_26.jsonl", "shard_27.jsonl", "shard_28.jsonl", "shard_29.jsonl", "shard_30.jsonl", "shard_31.jsonl", "shard_32.jsonl", "shard_33.jsonl", "shard_34.jsonl"]}]}]} | 2023-11-13T12:52:23+00:00 | []
| [
"en"
]
| TAGS
#size_categories-10M<n<100M #language-English #license-mit #not-for-all-audiences #chemistry #biology #finance #legal #music #art #code #climate #medical #region-us
|
# ️Welcome to Tonic'sEasyReddit!
!image/png
This is every record from "reddit_question_best_answers" (questions with their best answers), appended and produced according to the following template:
!image/png
- You can use it in shards or all together !
- This dataset is internally consistent !
The point is to make it easy to train models with a single correctly formatted dataset of
- 54,367,153 rows
# Original Dataset :
nreimers/reddit_question_best_answers
# How To Use :
Combine random shards in random quantities to produce a high-quality conversational training dataset for fine-tuning, or combine rows line by line to save memory by running the following code:
# Pre-Processing
#### sharding script :
### Disclaimer :
Re-format this dataset before use.
Probably there's a big problem with the token count on these long answers
Good Luck ! | [
"# ️Welcome to Tonic'sEasyReddit!\n\n\n\n\n!image/png\n\n\nThis is every \"best reddit_question_best_answers\" appended and produced according to the following template :\n\n\n\n!image/png\n\n\n- You can use it in shards or all together !\n\n- This dataset is internally consistent !\n\n\n\nThe point is to make it easy to train models with a single correctly formatted dataset of\n\n- 54,367,153 rows",
"# Original Dataset :\n\nnreimers/reddit_question_best_answers",
"# How To Use : \n\nCombine random shards in random quantities to produce a very high quality conversational training dataset for fine tuning or try combining rows line by line to save memory by running the following code:",
"# Pre-Processing",
"#### sharding script :",
"### Disclaimer :\n\nRe-format this dataset before use.\n\nProbably there's a big problem with the token count on these long answers \n\nGood Luck ! "
]
| [
"TAGS\n#size_categories-10M<n<100M #language-English #license-mit #not-for-all-audiences #chemistry #biology #finance #legal #music #art #code #climate #medical #region-us \n",
"# ️Welcome to Tonic'sEasyReddit!\n\n\n\n\n!image/png\n\n\nThis is every \"best reddit_question_best_answers\" appended and produced according to the following template :\n\n\n\n!image/png\n\n\n- You can use it in shards or all together !\n\n- This dataset is internally consistent !\n\n\n\nThe point is to make it easy to train models with a single correctly formatted dataset of\n\n- 54,367,153 rows",
"# Original Dataset :\n\nnreimers/reddit_question_best_answers",
"# How To Use : \n\nCombine random shards in random quantities to produce a very high quality conversational training dataset for fine tuning or try combining rows line by line to save memory by running the following code:",
"# Pre-Processing",
"#### sharding script :",
"### Disclaimer :\n\nRe-format this dataset before use.\n\nProbably there's a big problem with the token count on these long answers \n\nGood Luck ! "
]
| [
61,
100,
19,
49,
6,
6,
35
]
| [
"passage: TAGS\n#size_categories-10M<n<100M #language-English #license-mit #not-for-all-audiences #chemistry #biology #finance #legal #music #art #code #climate #medical #region-us \n# ️Welcome to Tonic'sEasyReddit!\n\n\n\n\n!image/png\n\n\nThis is every \"best reddit_question_best_answers\" appended and produced according to the following template :\n\n\n\n!image/png\n\n\n- You can use it in shards or all together !\n\n- This dataset is internally consistent !\n\n\n\nThe point is to make it easy to train models with a single correctly formatted dataset of\n\n- 54,367,153 rows# Original Dataset :\n\nnreimers/reddit_question_best_answers# How To Use : \n\nCombine random shards in random quantities to produce a very high quality conversational training dataset for fine tuning or try combining rows line by line to save memory by running the following code:# Pre-Processing#### sharding script :### Disclaimer :\n\nRe-format this dataset before use.\n\nProbably there's a big problem with the token count on these long answers \n\nGood Luck ! "
]
|
8d55e4121ec1f1e3148c5a7a4cab2653f8a20ce6 | # Dataset Card for "sample"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | nadsoft/sample | [
"region:us"
]
| 2023-11-12T23:21:52+00:00 | {"dataset_info": {"features": [{"name": "audio", "dtype": "audio"}, {"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 268307293.08, "num_examples": 1670}], "download_size": 270896188, "dataset_size": 268307293.08}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2023-11-12T23:36:37+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "sample"
More Information needed | [
"# Dataset Card for \"sample\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"sample\"\n\nMore Information needed"
]
| [
6,
12
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"sample\"\n\nMore Information needed"
]
|
5e200c5733c924b04aa0e1eba41668768a9f2522 | # Dataset Card for "SDv2-GPT4Spatial-200-filtered1"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | Doub7e/SDv2-GPT4Spatial-2000-filtered1 | [
"region:us"
]
| 2023-11-12T23:24:48+00:00 | {"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "prompt", "dtype": "string"}, {"name": "T5_last_hidden_states", "sequence": {"sequence": {"sequence": "float32"}}}], "splits": [{"name": "train", "num_bytes": 2415734631.25, "num_examples": 1870}], "download_size": 1428677752, "dataset_size": 2415734631.25}} | 2023-11-12T23:42:25+00:00 | []
| []
| TAGS
#region-us
| # Dataset Card for "SDv2-GPT4Spatial-200-filtered1"
More Information needed | [
"# Dataset Card for \"SDv2-GPT4Spatial-200-filtered1\"\n\nMore Information needed"
]
| [
"TAGS\n#region-us \n",
"# Dataset Card for \"SDv2-GPT4Spatial-200-filtered1\"\n\nMore Information needed"
]
| [
6,
24
]
| [
"passage: TAGS\n#region-us \n# Dataset Card for \"SDv2-GPT4Spatial-200-filtered1\"\n\nMore Information needed"
]
|