Column schema of the records below:

| Column | Type | Min length | Max length |
|:---|:---|---:|---:|
| sha | string | 40 | 40 |
| text | string | 1 | 13.4M |
| id | string | 2 | 117 |
| tags | list | 1 | 7.91k |
| created_at | string | 25 | 25 |
| metadata | string | 2 | 875k |
| last_modified | string | 25 | 25 |
| arxiv | list | 0 | 25 |
| languages | list | 0 | 7.91k |
| tags_str | string | 17 | 159k |
| text_str | string | 1 | 447k |
| text_lists | list | 0 | 352 |
| processed_texts | list | 1 | 353 |
| tokens_length | list | 1 | 353 |
| input_texts | list | 1 | 40 |
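For orientation, a minimal sketch of inspecting one such record with Hugging Face `datasets`; the repository id is a placeholder, since the dump does not name its source:

```python
from datasets import load_dataset

# "some-org/dataset-cards-dump" is a placeholder -- the dump above does not
# name the repository these records come from.
ds = load_dataset("some-org/dataset-cards-dump", split="train")

row = ds[0]
# Each record mirrors the schema above: a 40-character sha, the raw dataset
# card markdown in `text`, the repo `id`, tag/language lists, and timestamps.
print(row["sha"], row["id"], row["created_at"])
print(row["text"][:200])
```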
1e4bc18ae14ee5b55a8eac4d80a9c2f960661936
# Dataset Card for "uzh-hs23-etsp-eval-single-noaxislabel-bar" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
hk-kaden-kim/uzh-hs23-etsp-eval-single-noaxislabel-bar
[ "region:us" ]
2023-10-08T09:46:02+00:00
{"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "caption", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 4710127.0, "num_examples": 100}], "download_size": 4650739, "dataset_size": 4710127.0}}
2023-10-08T09:53:31+00:00
[]
[]
47072e9d681988d9554c070e1bdfdee2f3301fb6
# Dataset Card for "uzh-hs23-etsp-eval-single-noaxislabel-line" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
hk-kaden-kim/uzh-hs23-etsp-eval-single-noaxislabel-line
[ "region:us" ]
2023-10-08T09:46:16+00:00
{"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "caption", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 3500006.0, "num_examples": 100}], "download_size": 3486375, "dataset_size": 3500006.0}}
2023-10-08T09:53:47+00:00
[]
[]
5772947aedddd28a32a183359f7286e260c11524
# Dataset Card for "uzh-hs23-etsp-eval-single-nogrid-bar" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
hk-kaden-kim/uzh-hs23-etsp-eval-single-nogrid-bar
[ "region:us" ]
2023-10-08T09:46:27+00:00
{"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "caption", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 5078088.0, "num_examples": 100}], "download_size": 5042214, "dataset_size": 5078088.0}}
2023-10-08T09:54:02+00:00
[]
[]
9f6f5ef130b4ee6b1868226d5cb5979cbef315a4
# Dataset Card for "uzh-hs23-etsp-eval-single-nogrid-line" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
hk-kaden-kim/uzh-hs23-etsp-eval-single-nogrid-line
[ "region:us" ]
2023-10-08T09:46:33+00:00
{"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "caption", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 3881934.0, "num_examples": 100}], "download_size": 3869794, "dataset_size": 3881934.0}}
2023-10-08T09:54:11+00:00
[]
[]
b3661c595e814890d1063afaaa18b42923e7f7bb
# Dataset Card for "uzh-hs23-etsp-eval-single-notitle-bar" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
hk-kaden-kim/uzh-hs23-etsp-eval-single-notitle-bar
[ "region:us" ]
2023-10-08T09:46:44+00:00
{"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "caption", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 4010855.0, "num_examples": 100}], "download_size": 3980524, "dataset_size": 4010855.0}}
2023-10-08T09:54:25+00:00
[]
[]
6ae86c705c547d6a10b87103b220434c03bad94c
# Dataset Card for "uzh-hs23-etsp-eval-single-notitle-line" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
hk-kaden-kim/uzh-hs23-etsp-eval-single-notitle-line
[ "region:us" ]
2023-10-08T09:46:52+00:00
{"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "caption", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 2890126.0, "num_examples": 100}], "download_size": 2878288, "dataset_size": 2890126.0}}
2023-10-08T09:54:33+00:00
[]
[]
0447cc7fe2323f8962ae8bb752a5fb82290dae88
# Dataset Card for "uzh-hs23-etsp-eval-multi-base-bar" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
hk-kaden-kim/uzh-hs23-etsp-eval-multi-base-bar
[ "region:us" ]
2023-10-08T09:47:05+00:00
{"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "caption", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 5362648.0, "num_examples": 100}], "download_size": 0, "dataset_size": 5362648.0}}
2023-10-08T09:59:19+00:00
[]
[]
4410dbe368f8f1812a8e20399bcdb2e7cde65fdb
# Dataset Card for "uzh-hs23-etsp-eval-multi-base-line" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
hk-kaden-kim/uzh-hs23-etsp-eval-multi-base-line
[ "region:us" ]
2023-10-08T09:47:10+00:00
{"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "caption", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 5404740.0, "num_examples": 100}], "download_size": 5387322, "dataset_size": 5404740.0}}
2023-10-08T09:59:35+00:00
[]
[]
7018de04bdde7488d16a92fac0d3502ed51f3006
# Dataset Card for "uzh-hs23-etsp-eval-multi-subplot-bar" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
hk-kaden-kim/uzh-hs23-etsp-eval-multi-subplot-bar
[ "region:us" ]
2023-10-08T09:47:21+00:00
{"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "caption", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 6192425.0, "num_examples": 100}], "download_size": 6134847, "dataset_size": 6192425.0}}
2023-10-08T09:59:52+00:00
[]
[]
2d8e9a69bca7e8e84f9d3bd1523523252fb7b6f2
# Dataset Card for "uzh-hs23-etsp-eval-multi-subplot-line" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
hk-kaden-kim/uzh-hs23-etsp-eval-multi-subplot-line
[ "region:us" ]
2023-10-08T09:47:27+00:00
{"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "caption", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 5443206.0, "num_examples": 100}], "download_size": 5428035, "dataset_size": 5443206.0}}
2023-10-08T10:00:11+00:00
[]
[]
6c2928cb64cd0d8ec0f3790ce848048fde1911dd
# Dataset Card for ThaiSum

This dataset was forked from [thaisum](https://huggingface.co/datasets/thaisum) to the HF hub.

## Table of Contents
- [Dataset Description](#dataset-description)
  - [Dataset Summary](#dataset-summary)
  - [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards)
  - [Languages](#languages)
- [Dataset Structure](#dataset-structure)
  - [Data Instances](#data-instances)
  - [Data Fields](#data-fields)
  - [Data Splits](#data-splits)
- [Dataset Creation](#dataset-creation)
  - [Curation Rationale](#curation-rationale)
  - [Source Data](#source-data)
  - [Annotations](#annotations)
  - [Personal and Sensitive Information](#personal-and-sensitive-information)
- [Considerations for Using the Data](#considerations-for-using-the-data)
  - [Social Impact of Dataset](#social-impact-of-dataset)
  - [Discussion of Biases](#discussion-of-biases)
  - [Other Known Limitations](#other-known-limitations)
- [Additional Information](#additional-information)
  - [Dataset Curators](#dataset-curators)
  - [Licensing Information](#licensing-information)
  - [Citation Information](#citation-information)
  - [Contributions](#contributions)

## Dataset Description

- **Homepage:** https://github.com/nakhunchumpolsathien/ThaiSum
- **Repository:** https://github.com/nakhunchumpolsathien/ThaiSum
- **Paper:**
- **Leaderboard:**
- **Point of Contact:** https://github.com/nakhunchumpolsathien

### Dataset Summary

ThaiSum is a large-scale corpus for Thai text summarization obtained from several online news websites, namely Thairath, ThaiPBS, Prachathai, and The Standard. The dataset consists of over 350,000 article-summary pairs written by journalists.

### Supported Tasks and Leaderboards

summarization, language modeling

### Languages

Thai

## Dataset Structure

### Data Instances

```
{'body': 'กีเก ซานเชซ ฟลอเรส\xa0 กุนซือเลือดกระทิงของทีมวัตฟอร์ด\xa0 เมินประเด็นจุดโทษปัญหาในเกมพรีเมียร์ลีก อังกฤษ นัดที่แตนอาละวาดเปิดบ้านพ่าย คริสตัล พาเลซ 0-1ชี้ทีมของเขาเล่นไม่ดีพอเอง,สำนักข่าวต่างประเทศรายงานวันที่ 27 ก.ย. ว่า กีเก ซานเชซ ฟลอเรส\xa0 ผู้จัดการทีมชาวสเปน ของ แตนอาละวาด วัตฟอร์ด\xa0 ยอมรับทีมของเขาเล่นได้ไม่ดีพอเอง ในเกมพรีเมียร์ลีก อังกฤษ นัดเปิดบ้านพ่าย อินทรีผงาด คริสตัล พาเลซ 0-1 เมื่อคืนวันอาทิตย์ที่ผ่านมา,เกมนี้จุดเปลี่ยนมาอยู่ที่การได้จุดโทษในช่วงครึ่งหลังของ คริสตัล พาเลซ ซึ่งไม่ค่อยชัดเจนเท่าไหร่ว่า อัลลัน นียอม นั้นไปทำฟาล์วใส่ วิลฟรีด ซาฮา ในเขตโทษหรือไม่ แต่ผู้ตัดสินก็ชี้เป็นจุดโทษ ซึ่ง โยอัน กาบาย สังหารไม่พลาด และเป็นประตูชัยช่วยให้ คริสตัล พาเลซ เอาชนะ วัตฟอร์ด ไป 1-0 และเป็นการพ่ายแพ้ในบ้านนัดแรกของวัตฟอร์ดในฤดูกาลนี้อีกด้วย,ฟลอเรส กล่าวว่า มันเป็นเรื่องยากในการหยุดเกมรุกของคริสตัล พาเลซ ซึ่งมันอึดอัดจริงๆสำหรับเรา เราเล่นกันได้ไม่ดีนักในตอนที่ได้ครองบอล เราต้องเล่นทางริมเส้นให้มากกว่านี้ เราไม่สามารถหยุดเกมสวนกลับของพวกเขาได้ และแนวรับของเราก็ยืนไม่เป็นระเบียบสักเท่าไหร่ในช่วงครึ่งแรก ส่วนเรื่องจุดโทษการตัดสินใจขั้นสุดท้ายมันอยู่ที่ผู้ตัดสิน ซึ่งมันเป็นการตัดสินใจที่สำคัญ ผมเองก็ไม่รู้ว่าเขาตัดสินถูกหรือเปล่า บางทีมันอาจเป็นจุดที่ตัดสินเกมนี้เลย แต่เราไม่ได้แพ้เกมนี้เพราะจุดโทษ เราแพ้ในวันนี้เพราะเราเล่นไม่ดีและคริสตัล พาเลซ เล่นดีกว่าเรา เราไม่ได้มีฟอร์มการเล่นที่ดีในเกมนี้เลย', 'summary': 'กีเก ซานเชซ ฟลอเรส กุนซือเลือดกระทิงของทีมวัตฟอร์ด เมินประเด็นจุดโทษปัญหาในเกมพรีเมียร์ลีก อังกฤษ นัดที่แตนอาละวาดเปิดบ้านพ่าย คริสตัล พาเลซ 0-1ชี้ทีมของเขาเล่นไม่ดีพอเอง', 'tags': 'พรีเมียร์ลีก,วัตฟอร์ด,คริสตัล พาเลซ,กีเก ซานเชซ ฟลอเรส,ข่าวกีฬา,ข่าว,ไทยรัฐออนไลน์', 'title': 'ฟลอเรส รับ วัตฟอร์ดห่วยเองเกมพ่ายพาเลซคาบ้าน', 'type': '', 'url': 'https://www.thairath.co.th/content/528322'}
```

### Data Fields

- `title`: title of the article
- `body`: body of the article
- `summary`: summary of the article
- `type`: type of the article, if any
- `tags`: tags of the article, separated by `,`
- `url`: URL of the article

### Data Splits

train/valid/test: 358868 / 11000 / 11000

## Dataset Creation

### Curation Rationale

Sequence-to-sequence (Seq2Seq) models have shown great achievement in text summarization. However, Seq2Seq models often require large-scale training data to achieve effective results. Although many impressive advancements have been made in the text summarization field, most summarization studies focus on resource-rich languages, and the progress of Thai text summarization is still far behind. The dearth of large-scale datasets keeps Thai text summarization in its infancy. As far as we know, there is no other large-scale dataset for Thai text summarization available anywhere. Thus, we present ThaiSum, a large-scale corpus for Thai text summarization obtained from several online news websites, namely Thairath, ThaiPBS, Prachathai, and The Standard.

### Source Data

#### Initial Data Collection and Normalization

We used a Python library named Scrapy to crawl articles from several news websites, namely Thairath, Prachatai, ThaiPBS, and The Standard. We first collected the news URLs provided in their sitemaps. During web crawling, we used the HTML markup and metadata available in HTML pages to identify article text, summary, headline, tags, and label. Collected articles were published online from 2014 to August 2020.

We further performed a data-cleansing process to minimize noisy data. We filtered out articles whose article text or summary was missing. Articles whose article text was shorter than 150 words or whose summary was shorter than 15 words were removed. We also discarded articles containing at least one of the following tags: ‘ดวง’ (horoscope), ‘นิยาย’ (novel), ‘อินสตราแกรมดารา’ (celebrity Instagram), ‘คลิปสุดฮา’ (funny video), and ‘สรุปข่าว’ (highlight news).

Some summaries were completely irrelevant to their original article texts. To eliminate those irrelevant summaries, we calculated an abstractedness score between each summary and its article text. The abstractedness score is written formally as:

$$\frac{|S - A|}{r} \times 100$$

where $S$ denotes the set of article tokens, $A$ denotes the set of summary tokens, and $r$ denotes the total number of summary tokens. We omitted articles with a 1-gram abstractedness score higher than 60%.
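To make the filter concrete, here is a minimal sketch of the 1-gram abstractedness computation using PyThaiNLP's `newmm` tokenizer (the engine named just below). The helper name is illustrative, not the authors' code, and the sketch reads $S$ as the summary token set so that the score is the percentage of summary tokens absent from the article and is bounded by 100:

```python
from pythainlp.tokenize import word_tokenize  # pip install pythainlp

def abstractedness(article: str, summary: str) -> float:
    """1-gram abstractedness, |S - A| / r * 100.

    Interpreted here with S = set of summary tokens, A = set of article
    tokens, and r = total number of summary tokens (the card's wording
    assigns S to the article; this reading keeps the score within 0-100).
    """
    article_tokens = word_tokenize(article, engine="newmm")
    summary_tokens = word_tokenize(summary, engine="newmm")
    novel = set(summary_tokens) - set(article_tokens)
    return 100.0 * len(novel) / len(summary_tokens)

# Articles scoring above 60% were discarded as having irrelevant summaries.
```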
It is important to point out that we used [PyThaiNLP](https://github.com/PyThaiNLP/pythainlp), version 2.2.4, with the newmm tokenization engine, to process the Thai texts in this study. Tokenizing running Thai text into words or sentences is challenging because the Thai language has no clear word/sentence delimiters, so different tokenization engines may segment words/sentences differently.

After the data-cleansing process, the ThaiSum dataset contains over 358,000 articles. The size of this dataset is comparable to a well-known English document-summarization dataset, the CNN/Daily Mail dataset. Moreover, we analyse the characteristics of this dataset by measuring the abstractedness level, compression rate, and content diversity. For more details, see [thaisum_exploration.ipynb](https://github.com/nakhunchumpolsathien/ThaiSum/blob/master/thaisum_exploration.ipynb).

#### Dataset Statistics

The ThaiSum dataset consists of 358,868 articles. The average lengths of article texts and summaries are approximately 530 and 37 words, respectively. As mentioned earlier, we also collected the headlines, tags, and labels provided in each article. Tags are similar to keywords of the article. An article normally contains several tags but only a few labels. Tags can be names of places or persons that the article is about, while labels indicate the news category (politics, entertainment, etc.). Ultimately, ThaiSum contains 538,059 unique tags and 59 unique labels. Note that not every article contains tags or labels.

| Dataset Size | 358,868 | articles |
|:---|---:|---:|
| Avg. Article Length | 529.5 | words |
| Avg. Summary Length | 37.3 | words |
| Avg. Headline Length | 12.6 | words |
| Unique Vocabulary Size | 407,355 | words |
| Occurring > 10 times | 81,761 | words |
| Unique News Tag Size | 538,059 | tags |
| Unique News Label Size | 59 | labels |

#### Who are the source language producers?

Journalists of the respective articles

### Annotations

#### Annotation process

`summary`, `type` and `tags` were created by the journalists who wrote the articles and/or their publishers.

#### Who are the annotators?

`summary`, `type` and `tags` were created by the journalists who wrote the articles and/or their publishers.

### Personal and Sensitive Information

All data are public news articles. No personal or sensitive information is expected to be included.

## Considerations for Using the Data

### Social Impact of Dataset

- News summarization in Thai
- Language modeling for Thai news

### Discussion of Biases

- [ThaiPBS](https://www.thaipbs.or.th/home) [receives funding from the Thai government](https://www.bangkokbiznews.com/blog/detail/648740).
- [Thairath](https://www.thairath.co.th/) is known as [the most popular newspaper in Thailand](https://mgronline.com/onlinesection/detail/9620000058532); no clear political leaning.
- [The Standard](https://thestandard.co/) is a left-leaning online magazine.
- [Prachathai](https://prachatai.com/) is a left-leaning, human-rights-focused news site.

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[@nakhunchumpolsathien](https://github.com/nakhunchumpolsathien/)
[@caramelWaffle](https://github.com/caramelWaffle)

### Licensing Information

MIT License

### Citation Information

```
@mastersthesis{chumpolsathien_2020,
  title={Using Knowledge Distillation from Keyword Extraction to Improve the Informativeness of Neural Cross-lingual Summarization},
  author={Chumpolsathien, Nakhun},
  year={2020},
  school={Beijing Institute of Technology}
}
```

### Contributions

Thanks to [@cstorm125](https://github.com/cstorm125) for adding this dataset.
pythainlp/thaisum
[ "task_categories:summarization", "task_categories:text-generation", "task_categories:fill-mask", "task_ids:language-modeling", "task_ids:masked-language-modeling", "annotations_creators:no-annotation", "language_creators:found", "multilinguality:monolingual", "size_categories:100K<n<1M", "source_datasets:original", "language:th", "license:mit", "region:us" ]
2023-10-08T10:06:14+00:00
{"annotations_creators": ["no-annotation"], "language_creators": ["found"], "language": ["th"], "license": ["mit"], "multilinguality": ["monolingual"], "size_categories": ["100K<n<1M"], "source_datasets": ["original"], "task_categories": ["summarization", "text-generation", "fill-mask"], "task_ids": ["language-modeling", "masked-language-modeling"], "pretty_name": "ThaiSum"}
2023-10-08T13:06:17+00:00
[]
[ "th" ]
51c8e59f485ee8dc43a52d715912d2b807f74c70
# Dataset Card for "synpre_copy_1M" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
tyzhu/synpre_copy_1M
[ "region:us" ]
2023-10-08T10:07:17+00:00
{"dataset_info": {"features": [{"name": "inputs", "dtype": "string"}, {"name": "targets", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 1617536415, "num_examples": 1000000}, {"name": "validation", "num_bytes": 16128863, "num_examples": 10000}], "download_size": 1399461338, "dataset_size": 1633665278}}
2023-10-08T10:11:14+00:00
[]
[]
955dba764a07802e3d7a5beac9325255788507f1
# Dataset Card for RAG-Instruct-Test-Dataset

### Dataset Summary

This is a test dataset for basic "retrieval augmented generation" (RAG) use cases in the enterprise, especially for finance and legal. The test dataset includes 100 samples with context passages pulled from common retrieval scenarios, e.g., financial news, earnings releases, contracts, invoices, technical articles, general news, and short texts. The primary use case is to evaluate the effectiveness of an instruct-fine-tuned LLM used in conjunction with closed-context, fact-based question-answering, key-value extraction, and summarization with bullet points. The context passages in this test set are relatively short, ranging from ~100 tokens to ~500 tokens. The set was designed for use with the BLING series of models but is suitable for comparative evaluation of any LLM in basic RAG scenarios.

### **PERFORMANCE on BASIC RAG TEST DATASET**

| Model | Params (B) | Sourcing | GPU/CPU | Output Tokens | Out as % of Input | Process Time (secs) | Score (0-100) |
| :---------- | :--------: | :----: | :-----: | :---------: | :-------: | :--------: | :-------: |
| gpt-4 | <=1000 | Closed | Multi-GPU | 2665 | 10.53% | 183.8 | 100 |
| gpt-3.5-turbo-instruct | <=175 | Closed | Multi-GPU | 2621 | 11.49% | 62.7 | 100 |
| claude-instant-v1 | <=50 | Closed | Multi-GPU | 6337 | 26.50% | 154 | 100 |
| aib-read-gpt | 7 | Closed | GPU | 1964 | 9.30% | 114 | 96 |
| bling_falcon-1b-0.1 | 1.3 | Open | CPU | 3204 | 14.55% | 696 | 77 |
| bling_pythia-1.4b-0.1 | 1.4 | Open | CPU | 2589 | 11.75% | 593.5 | 65 |
| bling_pythia-1b-0.1 | 1.0 | Open | CPU | 2753 | 12.49% | 428 | 59 |
| bling_cerebras-1.3b | 1.3 | Open | CPU | 3202 | 20.01% | 690.1 | 52 |
| bling_pythia_410m | 0.41 | NA | CPU | 2349 | 10.66% | 189 | 36 |
| bling_cerebras_590m | 0.59 | NA | CPU | 4407 | 20.01% | 400.8 | 30 |

Please check out our [BLOG](https://medium.com/@darrenoberst/evaluating-llm-performance-in-rag-instruct-use-cases-083dc272a31d) with more details, commentary, and comparative results from testing with this dataset. We will be enhancing the test dataset as well as creating more advanced test datasets in the future.

### Languages

English

## Dataset Structure

100 JSONL samples with 4 keys - "query" | "context" | "answer" | "sample_number"

### Personal and Sensitive Information

The dataset samples were written bespoke for this objective, but do rely upon some public information, including major public figures and widely reported events. Any other names were created/masked, and any overlap with real companies or people is coincidental.

## Dataset Card Contact

Darren Oberst & llmware team

Please reach out anytime if you are interested in this project and would like to participate and work with us!
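As a quick orientation, a minimal sketch of running a closed-context QA loop over this set; the split name `"test"` is an assumption (the card only states that there are 100 JSONL samples), and only the four keys listed above are used:

```python
from datasets import load_dataset

# Split name "test" is an assumption -- the card does not name its splits.
ds = load_dataset("llmware/rag_instruct_test_dataset_0.1", split="test")

for sample in ds.select(range(3)):
    # Closed-context prompting: the model should answer only from `context`.
    prompt = f"{sample['context']}\n\nQuestion: {sample['query']}\nAnswer:"
    print(sample["sample_number"], "expected:", sample["answer"][:80])
```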
llmware/rag_instruct_test_dataset_0.1
[ "license:apache-2.0", "finance", "legal", "region:us" ]
2023-10-08T10:55:59+00:00
{"license": "apache-2.0", "pretty_name": "RAG Instruct Test Dataset - Basic - v0.1", "tags": ["finance", "legal"]}
2023-11-04T07:03:13+00:00
[]
[]
78696beeba651e3e93c5727a62539a96a277b0cd
# Dataset Card for "ceval-exam-zhtw" C-Eval 是一個針對基礎模型的綜合中文評估套件。它由 13,948 道多項選擇題組成,涵蓋 52 個不同的學科和四個難度級別。[原始網站](https://cevalbenchmark.com/)和 [GitHub](https://github.com/SJTU-LIT/ceval/tree/main) 或查看[論文](https://arxiv.org/abs/2305.08322)以了解更多詳細資訊。 C-Eval 主要的數據都是使用簡體中文來撰寫并且用來評測簡體中文的 LLM 的效能來設計的,本數據集使用 OpenCC 來進行簡繁的中文轉換,主要目的方便繁中 LLM 的開發與驗測。 ## 下載 使用 Hugging Face `datasets` 直接載入資料集: ```python from datasets import load_dataset dataset=load_dataset(r"erhwenkuo/ceval-exam-zhtw",name="computer_network") print(dataset['val'][0]) # {'id': 0, 'question': '使用位填充方法,以01111110為位首flag,資料為011011111111111111110010,求問傳送時要新增幾個0____', 'A': '1', 'B': '2', 'C': '3', 'D': '4', 'answer': 'C', 'explanation': ''} ``` ## 授權 C-Eval 資料集根據 Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License 授權。 ## Citation 如果您使用這個資料集,請引用原始 C-Eval 的論文。 ``` @article{huang2023ceval, title={C-Eval: A Multi-Level Multi-Discipline Chinese Evaluation Suite for Foundation Models}, author={Huang, Yuzhen and Bai, Yuzhuo and Zhu, Zhihao and Zhang, Junlei and Zhang, Jinghan and Su, Tangjun and Liu, Junteng and Lv, Chuancheng and Zhang, Yikai and Lei, Jiayi and Fu, Yao and Sun, Maosong and He, Junxian}, journal={arXiv preprint arXiv:2305.08322}, year={2023} } ```
erhwenkuo/ceval-exam-zhtw
[ "language:zh", "license:cc", "\"llm-eval\"", "arxiv:2305.08322", "region:us" ]
2023-10-08T11:22:42+00:00
{"language": ["zh"], "license": "cc", "dataset_info": [{"config_name": "accountant", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 177004, "num_examples": 443}, {"name": "val", "num_bytes": 19555, "num_examples": 49}, {"name": "dev", "num_bytes": 3414, "num_examples": 5}], "download_size": 151561, "dataset_size": 199973}, {"config_name": "advanced_mathematics", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 50031, "num_examples": 173}, {"name": "val", "num_bytes": 5331, "num_examples": 19}, {"name": "dev", "num_bytes": 7021, "num_examples": 5}], "download_size": 50945, "dataset_size": 62383}, {"config_name": "art_studies", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 41230, "num_examples": 298}, {"name": "val", "num_bytes": 4581, "num_examples": 33}, {"name": "dev", "num_bytes": 1439, "num_examples": 5}], "download_size": 46573, "dataset_size": 47250}, {"config_name": "basic_medicine", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 28820, "num_examples": 175}, {"name": "val", "num_bytes": 2627, "num_examples": 19}, {"name": "dev", "num_bytes": 1825, "num_examples": 5}], "download_size": 37502, "dataset_size": 33272}, {"config_name": "business_administration", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 78396, "num_examples": 301}, {"name": "val", "num_bytes": 9225, "num_examples": 33}, {"name": "dev", "num_bytes": 3155, "num_examples": 5}], "download_size": 75404, "dataset_size": 90776}, {"config_name": "chinese_language_and_literature", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 32328, "num_examples": 209}, {"name": "val", "num_bytes": 3446, "num_examples": 23}, {"name": "dev", "num_bytes": 1892, "num_examples": 5}], "download_size": 43537, "dataset_size": 37666}, {"config_name": "civil_servant", 
"features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 181519, "num_examples": 429}, {"name": "val", "num_bytes": 21273, "num_examples": 47}, {"name": "dev", "num_bytes": 4576, "num_examples": 5}], "download_size": 180536, "dataset_size": 207368}, {"config_name": "clinical_medicine", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 42161, "num_examples": 200}, {"name": "val", "num_bytes": 4167, "num_examples": 22}, {"name": "dev", "num_bytes": 1951, "num_examples": 5}], "download_size": 48783, "dataset_size": 48279}, {"config_name": "college_chemistry", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 45801, "num_examples": 224}, {"name": "val", "num_bytes": 4443, "num_examples": 24}, {"name": "dev", "num_bytes": 3611, "num_examples": 5}], "download_size": 53682, "dataset_size": 53855}, {"config_name": "college_economics", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 119746, "num_examples": 497}, {"name": "val", "num_bytes": 14461, "num_examples": 55}, {"name": "dev", "num_bytes": 3673, "num_examples": 5}], "download_size": 106480, "dataset_size": 137880}, {"config_name": "college_physics", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 55731, "num_examples": 176}, {"name": "val", "num_bytes": 6145, "num_examples": 19}, {"name": "dev", "num_bytes": 3824, "num_examples": 5}], "download_size": 62806, "dataset_size": 65700}, {"config_name": "college_programming", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 84024, "num_examples": 342}, {"name": "val", "num_bytes": 9615, "num_examples": 37}, {"name": "dev", "num_bytes": 2900, "num_examples": 5}], "download_size": 83274, "dataset_size": 96539}, {"config_name": "computer_architecture", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, 
{"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 41173, "num_examples": 193}, {"name": "val", "num_bytes": 4188, "num_examples": 21}, {"name": "dev", "num_bytes": 2841, "num_examples": 5}], "download_size": 48203, "dataset_size": 48202}, {"config_name": "computer_network", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 35495, "num_examples": 171}, {"name": "val", "num_bytes": 3814, "num_examples": 19}, {"name": "dev", "num_bytes": 2364, "num_examples": 5}], "download_size": 43988, "dataset_size": 41673}, {"config_name": "discrete_mathematics", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 36057, "num_examples": 153}, {"name": "val", "num_bytes": 3424, "num_examples": 16}, {"name": "dev", "num_bytes": 2002, "num_examples": 5}], "download_size": 43029, "dataset_size": 41483}, {"config_name": "education_science", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 55756, "num_examples": 270}, {"name": "val", "num_bytes": 5522, "num_examples": 29}, {"name": "dev", "num_bytes": 3093, "num_examples": 5}], "download_size": 59946, "dataset_size": 64371}, {"config_name": "electrical_engineer", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 73769, "num_examples": 339}, {"name": "val", "num_bytes": 8327, "num_examples": 37}, {"name": "dev", "num_bytes": 2180, "num_examples": 5}], "download_size": 74147, "dataset_size": 84276}, {"config_name": "environmental_impact_assessment_engineer", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 84701, "num_examples": 281}, {"name": "val", "num_bytes": 9186, "num_examples": 31}, {"name": "dev", "num_bytes": 2495, "num_examples": 5}], "download_size": 73813, "dataset_size": 96382}, {"config_name": "fire_engineer", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", 
"dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 83743, "num_examples": 282}, {"name": "val", "num_bytes": 10016, "num_examples": 31}, {"name": "dev", "num_bytes": 2209, "num_examples": 5}], "download_size": 82070, "dataset_size": 95968}, {"config_name": "high_school_biology", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 55242, "num_examples": 175}, {"name": "val", "num_bytes": 6105, "num_examples": 19}, {"name": "dev", "num_bytes": 2164, "num_examples": 5}], "download_size": 60835, "dataset_size": 63511}, {"config_name": "high_school_chemistry", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 46918, "num_examples": 172}, {"name": "val", "num_bytes": 5625, "num_examples": 19}, {"name": "dev", "num_bytes": 2576, "num_examples": 5}], "download_size": 55719, "dataset_size": 55119}, {"config_name": "high_school_chinese", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 110380, "num_examples": 178}, {"name": "val", "num_bytes": 10475, "num_examples": 19}, {"name": "dev", "num_bytes": 5290, "num_examples": 5}], "download_size": 120269, "dataset_size": 126145}, {"config_name": "high_school_geography", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 41232, "num_examples": 178}, {"name": "val", "num_bytes": 3985, "num_examples": 19}, {"name": "dev", "num_bytes": 2087, "num_examples": 5}], "download_size": 50092, "dataset_size": 47304}, {"config_name": "high_school_history", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 56205, "num_examples": 182}, {"name": "val", "num_bytes": 6624, "num_examples": 20}, {"name": "dev", "num_bytes": 2421, "num_examples": 5}], "download_size": 68561, "dataset_size": 65250}, {"config_name": "high_school_mathematics", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", 
"dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 41095, "num_examples": 166}, {"name": "val", "num_bytes": 5144, "num_examples": 18}, {"name": "dev", "num_bytes": 3552, "num_examples": 5}], "download_size": 53179, "dataset_size": 49791}, {"config_name": "high_school_physics", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 61682, "num_examples": 175}, {"name": "val", "num_bytes": 7266, "num_examples": 19}, {"name": "dev", "num_bytes": 2266, "num_examples": 5}], "download_size": 66481, "dataset_size": 71214}, {"config_name": "high_school_politics", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 83428, "num_examples": 176}, {"name": "val", "num_bytes": 8912, "num_examples": 19}, {"name": "dev", "num_bytes": 4730, "num_examples": 5}], "download_size": 90433, "dataset_size": 97070}, {"config_name": "ideological_and_moral_cultivation", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 35315, "num_examples": 172}, {"name": "val", "num_bytes": 3241, "num_examples": 19}, {"name": "dev", "num_bytes": 1296, "num_examples": 5}], "download_size": 41159, "dataset_size": 39852}, {"config_name": "law", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 79806, "num_examples": 221}, {"name": "val", "num_bytes": 8119, "num_examples": 24}, {"name": "dev", "num_bytes": 4142, "num_examples": 5}], "download_size": 83236, "dataset_size": 92067}, {"config_name": "legal_professional", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 122000, "num_examples": 215}, {"name": "val", "num_bytes": 12215, "num_examples": 23}, {"name": "dev", "num_bytes": 6974, "num_examples": 5}], "download_size": 125256, "dataset_size": 141189}, {"config_name": "logic", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", 
"num_bytes": 144288, "num_examples": 204}, {"name": "val", "num_bytes": 15558, "num_examples": 22}, {"name": "dev", "num_bytes": 5641, "num_examples": 5}], "download_size": 142564, "dataset_size": 165487}, {"config_name": "mao_zedong_thought", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 56708, "num_examples": 219}, {"name": "val", "num_bytes": 5487, "num_examples": 24}, {"name": "dev", "num_bytes": 3352, "num_examples": 5}], "download_size": 57948, "dataset_size": 65547}, {"config_name": "marxism", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 38674, "num_examples": 179}, {"name": "val", "num_bytes": 4251, "num_examples": 19}, {"name": "dev", "num_bytes": 2142, "num_examples": 5}], "download_size": 44933, "dataset_size": 45067}, {"config_name": "metrology_engineer", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 47544, "num_examples": 219}, {"name": "val", "num_bytes": 6134, "num_examples": 24}, {"name": "dev", "num_bytes": 2485, "num_examples": 5}], "download_size": 54828, "dataset_size": 56163}, {"config_name": "middle_school_biology", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 47267, "num_examples": 192}, {"name": "val", "num_bytes": 5263, "num_examples": 21}, {"name": "dev", "num_bytes": 4327, "num_examples": 5}], "download_size": 58472, "dataset_size": 56857}, {"config_name": "middle_school_chemistry", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 47575, "num_examples": 185}, {"name": "val", "num_bytes": 5654, "num_examples": 20}, {"name": "dev", "num_bytes": 3866, "num_examples": 5}], "download_size": 59099, "dataset_size": 57095}, {"config_name": "middle_school_geography", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 23332, "num_examples": 108}, {"name": "val", "num_bytes": 2641, "num_examples": 
12}, {"name": "dev", "num_bytes": 2148, "num_examples": 5}], "download_size": 37389, "dataset_size": 28121}, {"config_name": "middle_school_history", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 47076, "num_examples": 207}, {"name": "val", "num_bytes": 5990, "num_examples": 22}, {"name": "dev", "num_bytes": 2014, "num_examples": 5}], "download_size": 56042, "dataset_size": 55080}, {"config_name": "middle_school_mathematics", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 33142, "num_examples": 177}, {"name": "val", "num_bytes": 4897, "num_examples": 19}, {"name": "dev", "num_bytes": 3187, "num_examples": 5}], "download_size": 44657, "dataset_size": 41226}, {"config_name": "middle_school_physics", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 48796, "num_examples": 178}, {"name": "val", "num_bytes": 5279, "num_examples": 19}, {"name": "dev", "num_bytes": 3531, "num_examples": 5}], "download_size": 59820, "dataset_size": 57606}, {"config_name": "middle_school_politics", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 72499, "num_examples": 193}, {"name": "val", "num_bytes": 7326, "num_examples": 21}, {"name": "dev", "num_bytes": 3687, "num_examples": 5}], "download_size": 76847, "dataset_size": 83512}, {"config_name": "modern_chinese_history", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 51247, "num_examples": 212}, {"name": "val", "num_bytes": 5188, "num_examples": 23}, {"name": "dev", "num_bytes": 2983, "num_examples": 5}], "download_size": 59728, "dataset_size": 59418}, {"config_name": "operating_system", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 31467, "num_examples": 179}, {"name": "val", "num_bytes": 3335, "num_examples": 19}, {"name": "dev", "num_bytes": 2611, "num_examples": 5}], "download_size": 
40349, "dataset_size": 37413}, {"config_name": "physician", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 89819, "num_examples": 443}, {"name": "val", "num_bytes": 8713, "num_examples": 49}, {"name": "dev", "num_bytes": 2033, "num_examples": 5}], "download_size": 91464, "dataset_size": 100565}, {"config_name": "plant_protection", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 31877, "num_examples": 199}, {"name": "val", "num_bytes": 3634, "num_examples": 22}, {"name": "dev", "num_bytes": 3726, "num_examples": 5}], "download_size": 42813, "dataset_size": 39237}, {"config_name": "probability_and_statistics", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 56749, "num_examples": 166}, {"name": "val", "num_bytes": 5781, "num_examples": 18}, {"name": "dev", "num_bytes": 6769, "num_examples": 5}], "download_size": 63258, "dataset_size": 69299}, {"config_name": "professional_tour_guide", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 41231, "num_examples": 266}, {"name": "val", "num_bytes": 4509, "num_examples": 29}, {"name": "dev", "num_bytes": 1764, "num_examples": 5}], "download_size": 51642, "dataset_size": 47504}, {"config_name": "sports_science", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 32536, "num_examples": 180}, {"name": "val", "num_bytes": 3493, "num_examples": 19}, {"name": "dev", "num_bytes": 4182, "num_examples": 5}], "download_size": 45905, "dataset_size": 40211}, {"config_name": "tax_accountant", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 174509, "num_examples": 443}, {"name": "val", "num_bytes": 18938, "num_examples": 49}, {"name": "dev", "num_bytes": 4274, "num_examples": 5}], "download_size": 148037, "dataset_size": 197721}, {"config_name": "teacher_qualification", "features": [{"name": "id", 
"dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 107372, "num_examples": 399}, {"name": "val", "num_bytes": 12220, "num_examples": 44}, {"name": "dev", "num_bytes": 3212, "num_examples": 5}], "download_size": 105439, "dataset_size": 122804}, {"config_name": "urban_and_rural_planner", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 110473, "num_examples": 418}, {"name": "val", "num_bytes": 12793, "num_examples": 46}, {"name": "dev", "num_bytes": 3184, "num_examples": 5}], "download_size": 101932, "dataset_size": 126450}, {"config_name": "veterinary_medicine", "features": [{"name": "id", "dtype": "int32"}, {"name": "question", "dtype": "string"}, {"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "explanation", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 39465, "num_examples": 210}, {"name": "val", "num_bytes": 4562, "num_examples": 23}, {"name": "dev", "num_bytes": 2365, "num_examples": 5}], "download_size": 48753, "dataset_size": 46392}], "configs": [{"config_name": "accountant", "data_files": [{"split": "test", "path": "accountant/test-*"}, {"split": "val", "path": "accountant/val-*"}, {"split": "dev", "path": "accountant/dev-*"}]}, {"config_name": "advanced_mathematics", "data_files": [{"split": "test", "path": "advanced_mathematics/test-*"}, {"split": "val", "path": "advanced_mathematics/val-*"}, {"split": "dev", "path": "advanced_mathematics/dev-*"}]}, {"config_name": "art_studies", "data_files": [{"split": "test", "path": "art_studies/test-*"}, {"split": "val", "path": "art_studies/val-*"}, {"split": "dev", "path": "art_studies/dev-*"}]}, {"config_name": "basic_medicine", "data_files": [{"split": "test", "path": "basic_medicine/test-*"}, {"split": "val", "path": "basic_medicine/val-*"}, {"split": "dev", "path": "basic_medicine/dev-*"}]}, {"config_name": "business_administration", "data_files": [{"split": "test", "path": "business_administration/test-*"}, {"split": "val", "path": "business_administration/val-*"}, {"split": "dev", "path": "business_administration/dev-*"}]}, {"config_name": "chinese_language_and_literature", "data_files": [{"split": "test", "path": "chinese_language_and_literature/test-*"}, {"split": "val", "path": "chinese_language_and_literature/val-*"}, {"split": "dev", "path": "chinese_language_and_literature/dev-*"}]}, {"config_name": "civil_servant", "data_files": [{"split": "test", "path": "civil_servant/test-*"}, {"split": "val", "path": "civil_servant/val-*"}, {"split": "dev", "path": "civil_servant/dev-*"}]}, {"config_name": "clinical_medicine", "data_files": [{"split": "test", "path": "clinical_medicine/test-*"}, {"split": "val", "path": "clinical_medicine/val-*"}, {"split": "dev", "path": "clinical_medicine/dev-*"}]}, {"config_name": "college_chemistry", "data_files": [{"split": "test", "path": "college_chemistry/test-*"}, {"split": "val", "path": 
"college_chemistry/val-*"}, {"split": "dev", "path": "college_chemistry/dev-*"}]}, {"config_name": "college_economics", "data_files": [{"split": "test", "path": "college_economics/test-*"}, {"split": "val", "path": "college_economics/val-*"}, {"split": "dev", "path": "college_economics/dev-*"}]}, {"config_name": "college_physics", "data_files": [{"split": "test", "path": "college_physics/test-*"}, {"split": "val", "path": "college_physics/val-*"}, {"split": "dev", "path": "college_physics/dev-*"}]}, {"config_name": "college_programming", "data_files": [{"split": "test", "path": "college_programming/test-*"}, {"split": "val", "path": "college_programming/val-*"}, {"split": "dev", "path": "college_programming/dev-*"}]}, {"config_name": "computer_architecture", "data_files": [{"split": "test", "path": "computer_architecture/test-*"}, {"split": "val", "path": "computer_architecture/val-*"}, {"split": "dev", "path": "computer_architecture/dev-*"}]}, {"config_name": "computer_network", "data_files": [{"split": "test", "path": "computer_network/test-*"}, {"split": "val", "path": "computer_network/val-*"}, {"split": "dev", "path": "computer_network/dev-*"}]}, {"config_name": "discrete_mathematics", "data_files": [{"split": "test", "path": "discrete_mathematics/test-*"}, {"split": "val", "path": "discrete_mathematics/val-*"}, {"split": "dev", "path": "discrete_mathematics/dev-*"}]}, {"config_name": "education_science", "data_files": [{"split": "test", "path": "education_science/test-*"}, {"split": "val", "path": "education_science/val-*"}, {"split": "dev", "path": "education_science/dev-*"}]}, {"config_name": "electrical_engineer", "data_files": [{"split": "test", "path": "electrical_engineer/test-*"}, {"split": "val", "path": "electrical_engineer/val-*"}, {"split": "dev", "path": "electrical_engineer/dev-*"}]}, {"config_name": "environmental_impact_assessment_engineer", "data_files": [{"split": "test", "path": "environmental_impact_assessment_engineer/test-*"}, {"split": "val", "path": "environmental_impact_assessment_engineer/val-*"}, {"split": "dev", "path": "environmental_impact_assessment_engineer/dev-*"}]}, {"config_name": "fire_engineer", "data_files": [{"split": "test", "path": "fire_engineer/test-*"}, {"split": "val", "path": "fire_engineer/val-*"}, {"split": "dev", "path": "fire_engineer/dev-*"}]}, {"config_name": "high_school_biology", "data_files": [{"split": "test", "path": "high_school_biology/test-*"}, {"split": "val", "path": "high_school_biology/val-*"}, {"split": "dev", "path": "high_school_biology/dev-*"}]}, {"config_name": "high_school_chemistry", "data_files": [{"split": "test", "path": "high_school_chemistry/test-*"}, {"split": "val", "path": "high_school_chemistry/val-*"}, {"split": "dev", "path": "high_school_chemistry/dev-*"}]}, {"config_name": "high_school_chinese", "data_files": [{"split": "test", "path": "high_school_chinese/test-*"}, {"split": "val", "path": "high_school_chinese/val-*"}, {"split": "dev", "path": "high_school_chinese/dev-*"}]}, {"config_name": "high_school_geography", "data_files": [{"split": "test", "path": "high_school_geography/test-*"}, {"split": "val", "path": "high_school_geography/val-*"}, {"split": "dev", "path": "high_school_geography/dev-*"}]}, {"config_name": "high_school_history", "data_files": [{"split": "test", "path": "high_school_history/test-*"}, {"split": "val", "path": "high_school_history/val-*"}, {"split": "dev", "path": "high_school_history/dev-*"}]}, {"config_name": "high_school_mathematics", "data_files": [{"split": "test", 
"path": "high_school_mathematics/test-*"}, {"split": "val", "path": "high_school_mathematics/val-*"}, {"split": "dev", "path": "high_school_mathematics/dev-*"}]}, {"config_name": "high_school_physics", "data_files": [{"split": "test", "path": "high_school_physics/test-*"}, {"split": "val", "path": "high_school_physics/val-*"}, {"split": "dev", "path": "high_school_physics/dev-*"}]}, {"config_name": "high_school_politics", "data_files": [{"split": "test", "path": "high_school_politics/test-*"}, {"split": "val", "path": "high_school_politics/val-*"}, {"split": "dev", "path": "high_school_politics/dev-*"}]}, {"config_name": "ideological_and_moral_cultivation", "data_files": [{"split": "test", "path": "ideological_and_moral_cultivation/test-*"}, {"split": "val", "path": "ideological_and_moral_cultivation/val-*"}, {"split": "dev", "path": "ideological_and_moral_cultivation/dev-*"}]}, {"config_name": "law", "data_files": [{"split": "test", "path": "law/test-*"}, {"split": "val", "path": "law/val-*"}, {"split": "dev", "path": "law/dev-*"}]}, {"config_name": "legal_professional", "data_files": [{"split": "test", "path": "legal_professional/test-*"}, {"split": "val", "path": "legal_professional/val-*"}, {"split": "dev", "path": "legal_professional/dev-*"}]}, {"config_name": "logic", "data_files": [{"split": "test", "path": "logic/test-*"}, {"split": "val", "path": "logic/val-*"}, {"split": "dev", "path": "logic/dev-*"}]}, {"config_name": "mao_zedong_thought", "data_files": [{"split": "test", "path": "mao_zedong_thought/test-*"}, {"split": "val", "path": "mao_zedong_thought/val-*"}, {"split": "dev", "path": "mao_zedong_thought/dev-*"}]}, {"config_name": "marxism", "data_files": [{"split": "test", "path": "marxism/test-*"}, {"split": "val", "path": "marxism/val-*"}, {"split": "dev", "path": "marxism/dev-*"}]}, {"config_name": "metrology_engineer", "data_files": [{"split": "test", "path": "metrology_engineer/test-*"}, {"split": "val", "path": "metrology_engineer/val-*"}, {"split": "dev", "path": "metrology_engineer/dev-*"}]}, {"config_name": "middle_school_biology", "data_files": [{"split": "test", "path": "middle_school_biology/test-*"}, {"split": "val", "path": "middle_school_biology/val-*"}, {"split": "dev", "path": "middle_school_biology/dev-*"}]}, {"config_name": "middle_school_chemistry", "data_files": [{"split": "test", "path": "middle_school_chemistry/test-*"}, {"split": "val", "path": "middle_school_chemistry/val-*"}, {"split": "dev", "path": "middle_school_chemistry/dev-*"}]}, {"config_name": "middle_school_geography", "data_files": [{"split": "test", "path": "middle_school_geography/test-*"}, {"split": "val", "path": "middle_school_geography/val-*"}, {"split": "dev", "path": "middle_school_geography/dev-*"}]}, {"config_name": "middle_school_history", "data_files": [{"split": "test", "path": "middle_school_history/test-*"}, {"split": "val", "path": "middle_school_history/val-*"}, {"split": "dev", "path": "middle_school_history/dev-*"}]}, {"config_name": "middle_school_mathematics", "data_files": [{"split": "test", "path": "middle_school_mathematics/test-*"}, {"split": "val", "path": "middle_school_mathematics/val-*"}, {"split": "dev", "path": "middle_school_mathematics/dev-*"}]}, {"config_name": "middle_school_physics", "data_files": [{"split": "test", "path": "middle_school_physics/test-*"}, {"split": "val", "path": "middle_school_physics/val-*"}, {"split": "dev", "path": "middle_school_physics/dev-*"}]}, {"config_name": "middle_school_politics", "data_files": [{"split": "test", "path": 
"middle_school_politics/test-*"}, {"split": "val", "path": "middle_school_politics/val-*"}, {"split": "dev", "path": "middle_school_politics/dev-*"}]}, {"config_name": "modern_chinese_history", "data_files": [{"split": "test", "path": "modern_chinese_history/test-*"}, {"split": "val", "path": "modern_chinese_history/val-*"}, {"split": "dev", "path": "modern_chinese_history/dev-*"}]}, {"config_name": "operating_system", "data_files": [{"split": "test", "path": "operating_system/test-*"}, {"split": "val", "path": "operating_system/val-*"}, {"split": "dev", "path": "operating_system/dev-*"}]}, {"config_name": "physician", "data_files": [{"split": "test", "path": "physician/test-*"}, {"split": "val", "path": "physician/val-*"}, {"split": "dev", "path": "physician/dev-*"}]}, {"config_name": "plant_protection", "data_files": [{"split": "test", "path": "plant_protection/test-*"}, {"split": "val", "path": "plant_protection/val-*"}, {"split": "dev", "path": "plant_protection/dev-*"}]}, {"config_name": "probability_and_statistics", "data_files": [{"split": "test", "path": "probability_and_statistics/test-*"}, {"split": "val", "path": "probability_and_statistics/val-*"}, {"split": "dev", "path": "probability_and_statistics/dev-*"}]}, {"config_name": "professional_tour_guide", "data_files": [{"split": "test", "path": "professional_tour_guide/test-*"}, {"split": "val", "path": "professional_tour_guide/val-*"}, {"split": "dev", "path": "professional_tour_guide/dev-*"}]}, {"config_name": "sports_science", "data_files": [{"split": "test", "path": "sports_science/test-*"}, {"split": "val", "path": "sports_science/val-*"}, {"split": "dev", "path": "sports_science/dev-*"}]}, {"config_name": "tax_accountant", "data_files": [{"split": "test", "path": "tax_accountant/test-*"}, {"split": "val", "path": "tax_accountant/val-*"}, {"split": "dev", "path": "tax_accountant/dev-*"}]}, {"config_name": "teacher_qualification", "data_files": [{"split": "test", "path": "teacher_qualification/test-*"}, {"split": "val", "path": "teacher_qualification/val-*"}, {"split": "dev", "path": "teacher_qualification/dev-*"}]}, {"config_name": "urban_and_rural_planner", "data_files": [{"split": "test", "path": "urban_and_rural_planner/test-*"}, {"split": "val", "path": "urban_and_rural_planner/val-*"}, {"split": "dev", "path": "urban_and_rural_planner/dev-*"}]}, {"config_name": "veterinary_medicine", "data_files": [{"split": "test", "path": "veterinary_medicine/test-*"}, {"split": "val", "path": "veterinary_medicine/val-*"}, {"split": "dev", "path": "veterinary_medicine/dev-*"}]}], "tags": ["\"llm-eval\""]}
2023-10-10T01:14:55+00:00
[ "2305.08322" ]
[ "zh" ]
TAGS #language-Chinese #license-cc #"llm-eval" #arxiv-2305.08322 #region-us
# Dataset Card for "ceval-exam-zhtw" C-Eval 是一個針對基礎模型的綜合中文評估套件。它由 13,948 道多項選擇題組成,涵蓋 52 個不同的學科和四個難度級別。原始網站和 GitHub 或查看論文以了解更多詳細資訊。 C-Eval 主要的數據都是使用簡體中文來撰寫并且用來評測簡體中文的 LLM 的效能來設計的,本數據集使用 OpenCC 來進行簡繁的中文轉換,主要目的方便繁中 LLM 的開發與驗測。 ## 下載 使用 Hugging Face 'datasets' 直接載入資料集: ## 授權 C-Eval 資料集根據 Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License 授權。 如果您使用這個資料集,請引用原始 C-Eval 的論文。
[ "# Dataset Card for \"ceval-exam-zhtw\"\n\nC-Eval 是一個針對基礎模型的綜合中文評估套件。它由 13,948 道多項選擇題組成,涵蓋 52 個不同的學科和四個難度級別。原始網站和 GitHub 或查看論文以了解更多詳細資訊。\n\nC-Eval 主要的數據都是使用簡體中文來撰寫并且用來評測簡體中文的 LLM 的效能來設計的,本數據集使用 OpenCC 來進行簡繁的中文轉換,主要目的方便繁中 LLM 的開發與驗測。", "## 下載\n\n使用 Hugging Face 'datasets' 直接載入資料集:", "## 授權\n\nC-Eval 資料集根據 Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License 授權。\n\n如果您使用這個資料集,請引用原始 C-Eval 的論文。" ]
[ "TAGS\n#language-Chinese #license-cc #\"llm-eval\" #arxiv-2305.08322 #region-us \n", "# Dataset Card for \"ceval-exam-zhtw\"\n\nC-Eval 是一個針對基礎模型的綜合中文評估套件。它由 13,948 道多項選擇題組成,涵蓋 52 個不同的學科和四個難度級別。原始網站和 GitHub 或查看論文以了解更多詳細資訊。\n\nC-Eval 主要的數據都是使用簡體中文來撰寫并且用來評測簡體中文的 LLM 的效能來設計的,本數據集使用 OpenCC 來進行簡繁的中文轉換,主要目的方便繁中 LLM 的開發與驗測。", "## 下載\n\n使用 Hugging Face 'datasets' 直接載入資料集:", "## 授權\n\nC-Eval 資料集根據 Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License 授權。\n\n如果您使用這個資料集,請引用原始 C-Eval 的論文。" ]
[ 33, 133, 20, 45 ]
[ "passage: TAGS\n#language-Chinese #license-cc #\"llm-eval\" #arxiv-2305.08322 #region-us \n# Dataset Card for \"ceval-exam-zhtw\"\n\nC-Eval 是一個針對基礎模型的綜合中文評估套件。它由 13,948 道多項選擇題組成,涵蓋 52 個不同的學科和四個難度級別。原始網站和 GitHub 或查看論文以了解更多詳細資訊。\n\nC-Eval 主要的數據都是使用簡體中文來撰寫并且用來評測簡體中文的 LLM 的效能來設計的,本數據集使用 OpenCC 來進行簡繁的中文轉換,主要目的方便繁中 LLM 的開發與驗測。## 下載\n\n使用 Hugging Face 'datasets' 直接載入資料集:## 授權\n\nC-Eval 資料集根據 Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License 授權。\n\n如果您使用這個資料集,請引用原始 C-Eval 的論文。" ]
5a6f6a840b14498f6434c209ae2135173c75d76e
# AutoTrain Dataset for project: data-protection_194

## Dataset Description

This dataset has been automatically processed by AutoTrain for project data-protection_194.

### Languages

The BCP-47 code for the dataset's language is unk.

## Dataset Structure

### Data Instances

A sample from this dataset looks as follows:

```json
[
  {
    "text": "grindr conserver\u00e0 i registri delle applicazioni in virt\u00f9 della riservatezza, in un ambiente controllato e sicuro, per sei (6) mesi dalla data di sottoscrizione",
    "target": 0
  },
  {
    "text": "riceve una licenza revocabile, non- esclusiva, non-cedibile, limitata e personale per l'accesso e la scelta dei diritti che ea rende espressamente disponibili",
    "target": 1
  }
]
```

### Dataset Fields

The dataset has the following fields (also called "features"):

```json
{
  "text": "Value(dtype='string', id=None)",
  "target": "ClassLabel(names=['data protection', 'other'], id=None)"
}
```

### Dataset Splits

This dataset is split into a train and validation split. The split sizes are as follows:

| Split name | Num samples |
| ------------ | ------------------- |
| train | 154 |
| valid | 40 |
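Because `target` is a `ClassLabel`, the integers in the samples above decode to readable names; a minimal sketch of loading the project data and recovering those names (the repo path matches the AutoTrain project id, though loading details may differ for AutoTrain-managed repos):

```python
from datasets import load_dataset

# Load the processed training split of the AutoTrain project data.
ds = load_dataset("maxzancanaro/autotrain-data-data-protection_194", split="train")

# ClassLabel stores integers; .names maps them back to
# ['data protection', 'other'].
label_names = ds.features["target"].names
sample = ds[0]
print(sample["text"], "->", label_names[sample["target"]])
```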
maxzancanaro/autotrain-data-data-protection_194
[ "task_categories:text-classification", "region:us" ]
2023-10-08T11:30:19+00:00
{"task_categories": ["text-classification"]}
2023-10-08T11:30:49+00:00
[]
[]
TAGS #task_categories-text-classification #region-us
AutoTrain Dataset for project: data-protection\_194 =================================================== Dataset Description ------------------- This dataset has been automatically processed by AutoTrain for project data-protection\_194. ### Languages The BCP-47 code for the dataset's language is unk. Dataset Structure ----------------- ### Data Instances A sample from this dataset looks as follows: ### Dataset Fields The dataset has the following fields (also called "features"): ### Dataset Splits This dataset is split into a train and validation split. The split sizes are as follows:
[ "### Languages\n\n\nThe BCP-47 code for the dataset's language is unk.\n\n\nDataset Structure\n-----------------", "### Data Instances\n\n\nA sample from this dataset looks as follows:", "### Dataset Fields\n\n\nThe dataset has the following fields (also called \"features\"):", "### Dataset Splits\n\n\nThis dataset is split into a train and validation split. The split sizes are as follow:" ]
[ "TAGS\n#task_categories-text-classification #region-us \n", "### Languages\n\n\nThe BCP-47 code for the dataset's language is unk.\n\n\nDataset Structure\n-----------------", "### Data Instances\n\n\nA sample from this dataset looks as follows:", "### Dataset Fields\n\n\nThe dataset has the following fields (also called \"features\"):", "### Dataset Splits\n\n\nThis dataset is split into a train and validation split. The split sizes are as follow:" ]
[ 17, 27, 17, 23, 27 ]
[ "passage: TAGS\n#task_categories-text-classification #region-us \n### Languages\n\n\nThe BCP-47 code for the dataset's language is unk.\n\n\nDataset Structure\n-----------------### Data Instances\n\n\nA sample from this dataset looks as follows:### Dataset Fields\n\n\nThe dataset has the following fields (also called \"features\"):### Dataset Splits\n\n\nThis dataset is split into a train and validation split. The split sizes are as follow:" ]
2600a8eeeae6f0c5d5ce3cb49c50d2682c962c48
# Dataset Card for "c4_derived" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
pixel-coping/c4_derived
[ "region:us" ]
2023-10-08T11:32:55+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "c4", "path": "data/c4-*"}, {"split": "biomedical", "path": "data/biomedical-*"}, {"split": "counterfactual", "path": "data/counterfactual-*"}, {"split": "academic", "path": "data/academic-*"}]}], "dataset_info": {"features": [{"name": "text", "dtype": "string"}, {"name": "url", "dtype": "string"}], "splits": [{"name": "c4", "num_bytes": 1820234, "num_examples": 1000}, {"name": "biomedical", "num_bytes": 1803036, "num_examples": 989}, {"name": "counterfactual", "num_bytes": 1813882, "num_examples": 985}, {"name": "academic", "num_bytes": 1199491, "num_examples": 986}], "download_size": 4124290, "dataset_size": 6636643}}
2023-10-08T11:33:07+00:00
[]
[]
TAGS #region-us
# Dataset Card for "c4_derived" More Information needed
[ "# Dataset Card for \"c4_derived\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"c4_derived\"\n\nMore Information needed" ]
[ 6, 16 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"c4_derived\"\n\nMore Information needed" ]
3b7e4bc442fc48130e29890d6d97fcb61baeae50
# Dataset Card for "finance-alpaca-demo" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Ronal999/finance-alpaca-demo
[ "region:us" ]
2023-10-08T11:51:39+00:00
{"dataset_info": {"features": [{"name": "text", "dtype": "string"}, {"name": "instruction", "dtype": "string"}, {"name": "input", "dtype": "string"}, {"name": "output", "dtype": "string"}, {"name": "prompt", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 825832, "num_examples": 690}], "download_size": 456544, "dataset_size": 825832}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-08T11:51:43+00:00
[]
[]
TAGS #region-us
# Dataset Card for "finance-alpaca-demo" More Information needed
[ "# Dataset Card for \"finance-alpaca-demo\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"finance-alpaca-demo\"\n\nMore Information needed" ]
[ 6, 17 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"finance-alpaca-demo\"\n\nMore Information needed" ]
64d050d1105b1593e52b8db35b586951683f7725
# Dataset Card for "korean-child-free-voice_sample" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
star1sh/korean-child-free-voice_sample
[ "region:us" ]
2023-10-08T11:54:56+00:00
{"dataset_info": {"features": [{"name": "text", "dtype": "string"}, {"name": "audio_data", "sequence": "float32"}], "splits": [{"name": "train", "num_bytes": 6804670, "num_examples": 36}], "download_size": 0, "dataset_size": 6804670}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-08T12:25:15+00:00
[]
[]
TAGS #region-us
# Dataset Card for "korean-child-free-voice_sample" More Information needed
[ "# Dataset Card for \"korean-child-free-voice_sample\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"korean-child-free-voice_sample\"\n\nMore Information needed" ]
[ 6, 23 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"korean-child-free-voice_sample\"\n\nMore Information needed" ]
9d510835f707330531faac8f931dc862c4843559
# Dataset Card for "catholic_4800_dataset_20231008_131846" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
tr416/catholic_4800_dataset_20231008_131846
[ "region:us" ]
2023-10-08T12:18:47+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "input_ids", "sequence": "int32"}, {"name": "attention_mask", "sequence": "int8"}], "splits": [{"name": "train", "num_bytes": 760128.0, "num_examples": 296}, {"name": "test", "num_bytes": 7704.0, "num_examples": 3}], "download_size": 52079, "dataset_size": 767832.0}}
2023-10-08T12:18:48+00:00
[]
[]
TAGS #region-us
# Dataset Card for "catholic_4800_dataset_20231008_131846" More Information needed
[ "# Dataset Card for \"catholic_4800_dataset_20231008_131846\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"catholic_4800_dataset_20231008_131846\"\n\nMore Information needed" ]
[ 6, 27 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"catholic_4800_dataset_20231008_131846\"\n\nMore Information needed" ]
80bec648ea3d6713e8cc32e476ef56bd4433b55d
# Dataset Card for "catholic_4800_dataset_20231008_132059" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
tr416/catholic_4800_dataset_20231008_132059
[ "region:us" ]
2023-10-08T12:20:59+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "input_ids", "sequence": "int32"}, {"name": "attention_mask", "sequence": "int8"}], "splits": [{"name": "train", "num_bytes": 760128.0, "num_examples": 296}, {"name": "test", "num_bytes": 7704.0, "num_examples": 3}], "download_size": 52254, "dataset_size": 767832.0}}
2023-10-08T12:21:00+00:00
[]
[]
TAGS #region-us
# Dataset Card for "catholic_4800_dataset_20231008_132059" More Information needed
[ "# Dataset Card for \"catholic_4800_dataset_20231008_132059\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"catholic_4800_dataset_20231008_132059\"\n\nMore Information needed" ]
[ 6, 26 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"catholic_4800_dataset_20231008_132059\"\n\nMore Information needed" ]
d5b6be209b5057f9c1463424f5524ce1db368b35
# Dataset Card for "cfc9bbcd" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
result-kand2-sdxl-wuerst-karlo/cfc9bbcd
[ "region:us" ]
2023-10-08T12:50:51+00:00
{"dataset_info": {"features": [{"name": "result", "dtype": "string"}, {"name": "id", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 187, "num_examples": 10}], "download_size": 1339, "dataset_size": 187}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-08T12:50:52+00:00
[]
[]
TAGS #region-us
# Dataset Card for "cfc9bbcd" More Information needed
[ "# Dataset Card for \"cfc9bbcd\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"cfc9bbcd\"\n\nMore Information needed" ]
[ 6, 15 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"cfc9bbcd\"\n\nMore Information needed" ]
bdb1f09a8ee0d13d23464df964fdc9192e696f80
# Dataset Card for "irfan-junejo-tweerts" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Asad321/irfan-junejo-tweerts
[ "region:us" ]
2023-10-08T12:51:28+00:00
{"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 42301, "num_examples": 126}], "download_size": 14643, "dataset_size": 42301}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-08T12:51:35+00:00
[]
[]
TAGS #region-us
# Dataset Card for "irfan-junejo-tweerts" More Information needed
[ "# Dataset Card for \"irfan-junejo-tweerts\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"irfan-junejo-tweerts\"\n\nMore Information needed" ]
[ 6, 19 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"irfan-junejo-tweerts\"\n\nMore Information needed" ]
7f63b9c3d86645435e4193cfbc74ca5457f27a40
task_categories: - question-answering ---
lollox/math_dataset_50k
[ "region:us" ]
2023-10-08T13:04:54+00:00
{}
2023-10-08T13:51:58+00:00
[]
[]
TAGS #region-us
task_categories: - question-answering ---
[]
[ "TAGS\n#region-us \n" ]
[ 6 ]
[ "passage: TAGS\n#region-us \n" ]
3e2c1dc9e95d1f4e5b941644fa6719a41d191acd
# Dataset Card for Evaluation run of PulsarAI/Chat-AYB-Nova-13B

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/PulsarAI/Chat-AYB-Nova-13B
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [PulsarAI/Chat-AYB-Nova-13B](https://huggingface.co/PulsarAI/Chat-AYB-Nova-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_PulsarAI__Chat-AYB-Nova-13B",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-27T20:18:17.450635](https://huggingface.co/datasets/open-llm-leaderboard/details_PulsarAI__Chat-AYB-Nova-13B/blob/main/results_2023-10-27T20-18-17.450635.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each one in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.0041946308724832215,
        "em_stderr": 0.0006618716168266419,
        "f1": 0.0802946728187919,
        "f1_stderr": 0.0016873252068220475,
        "acc": 0.44971346473405205,
        "acc_stderr": 0.010392725523775513
    },
    "harness|drop|3": {
        "em": 0.0041946308724832215,
        "em_stderr": 0.0006618716168266419,
        "f1": 0.0802946728187919,
        "f1_stderr": 0.0016873252068220475
    },
    "harness|gsm8k|5": {
        "acc": 0.12357846853677028,
        "acc_stderr": 0.009065050306776921
    },
    "harness|winogrande|5": {
        "acc": 0.7758484609313339,
        "acc_stderr": 0.011720400740774104
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
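Beyond the winogrande example above, any per-task detail file can be pulled the same way; a minimal sketch that loads the DROP details at their most recent run, using the `harness_drop_3` config and the `latest` alias split described above:

```python
from datasets import load_dataset

# "latest" always mirrors the most recent timestamped split of a config,
# so re-running this after a new eval picks up the newest details.
drop_details = load_dataset(
    "open-llm-leaderboard/details_PulsarAI__Chat-AYB-Nova-13B",
    "harness_drop_3",
    split="latest",
)
print(drop_details)
```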
open-llm-leaderboard/details_PulsarAI__Chat-AYB-Nova-13B
[ "region:us" ]
2023-10-08T13:44:56+00:00
{"pretty_name": "Evaluation run of PulsarAI/Chat-AYB-Nova-13B", "dataset_summary": "Dataset automatically created during the evaluation run of model [PulsarAI/Chat-AYB-Nova-13B](https://huggingface.co/PulsarAI/Chat-AYB-Nova-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_PulsarAI__Chat-AYB-Nova-13B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-27T20:18:17.450635](https://huggingface.co/datasets/open-llm-leaderboard/details_PulsarAI__Chat-AYB-Nova-13B/blob/main/results_2023-10-27T20-18-17.450635.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0041946308724832215,\n \"em_stderr\": 0.0006618716168266419,\n \"f1\": 0.0802946728187919,\n \"f1_stderr\": 0.0016873252068220475,\n \"acc\": 0.44971346473405205,\n \"acc_stderr\": 0.010392725523775513\n },\n \"harness|drop|3\": {\n \"em\": 0.0041946308724832215,\n \"em_stderr\": 0.0006618716168266419,\n \"f1\": 0.0802946728187919,\n \"f1_stderr\": 0.0016873252068220475\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.12357846853677028,\n \"acc_stderr\": 0.009065050306776921\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7758484609313339,\n \"acc_stderr\": 0.011720400740774104\n }\n}\n```", "repo_url": "https://huggingface.co/PulsarAI/Chat-AYB-Nova-13B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|arc:challenge|25_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_27T20_18_17.450635", "path": ["**/details_harness|drop|3_2023-10-27T20-18-17.450635.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-27T20-18-17.450635.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_27T20_18_17.450635", "path": ["**/details_harness|gsm8k|5_2023-10-27T20-18-17.450635.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-27T20-18-17.450635.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hellaswag|10_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", 
"data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T14-44-32.660445.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T14-44-32.660445.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T14-44-32.660445.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T14-44-32.660445.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T14-44-32.660445.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_27T20_18_17.450635", "path": ["**/details_harness|winogrande|5_2023-10-27T20-18-17.450635.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-27T20-18-17.450635.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_08T14_44_32.660445", "path": ["results_2023-10-08T14-44-32.660445.parquet"]}, {"split": "2023_10_27T20_18_17.450635", "path": ["results_2023-10-27T20-18-17.450635.parquet"]}, {"split": "latest", "path": ["results_2023-10-27T20-18-17.450635.parquet"]}]}]}
2023-10-27T19:18:30+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of PulsarAI/Chat-AYB-Nova-13B ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model PulsarAI/Chat-AYB-Nova-13B on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-27T20:18:17.450635 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each of them in the results and in the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
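The loading snippet that the summary above refers to (given verbatim in this record's metadata) is:

```python
from datasets import load_dataset

# Per-sample details for one task (winogrande, 5-shot) of this evaluation run.
data = load_dataset("open-llm-leaderboard/details_PulsarAI__Chat-AYB-Nova-13B",
                    "harness_winogrande_5",
                    split="train")
```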
[ "# Dataset Card for Evaluation run of PulsarAI/Chat-AYB-Nova-13B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model PulsarAI/Chat-AYB-Nova-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-27T20:18:17.450635(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of PulsarAI/Chat-AYB-Nova-13B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model PulsarAI/Chat-AYB-Nova-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-27T20:18:17.450635(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 22, 31, 170, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of PulsarAI/Chat-AYB-Nova-13B## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model PulsarAI/Chat-AYB-Nova-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-27T20:18:17.450635(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
312fb4a1341a6bbcfb4bf893780ae6cf7d8e9335
# Dataset Card for "chart_captioning" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
oroikon/chart_captioning
[ "region:us" ]
2023-10-08T13:45:34+00:00
{"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 395695728.546, "num_examples": 7057}, {"name": "test", "num_bytes": 48381523.0, "num_examples": 882}, {"name": "validation", "num_bytes": 48266912.0, "num_examples": 883}], "download_size": 480469420, "dataset_size": 492344163.546}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}, {"split": "validation", "path": "data/validation-*"}]}]}
2023-10-08T14:48:31+00:00
[]
[]
TAGS #region-us
# Dataset Card for "chart_captioning" More Information needed
[ "# Dataset Card for \"chart_captioning\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"chart_captioning\"\n\nMore Information needed" ]
[ 6, 16 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"chart_captioning\"\n\nMore Information needed" ]
2c4972c006e1f94c955b02ff3ad47124ea0fb5ac
# Dataset Card for Evaluation run of PulsarAI/Chat-AYB-Platypus2-13B ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/PulsarAI/Chat-AYB-Platypus2-13B - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [PulsarAI/Chat-AYB-Platypus2-13B](https://huggingface.co/PulsarAI/Chat-AYB-Platypus2-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_PulsarAI__Chat-AYB-Platypus2-13B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-28T16:53:41.047162](https://huggingface.co/datasets/open-llm-leaderboard/details_PulsarAI__Chat-AYB-Platypus2-13B/blob/main/results_2023-10-28T16-53-41.047162.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each of them in the results and in the "latest" split for each eval): ```python { "all": { "em": 0.2752726510067114, "em_stderr": 0.0045741300617909856, "f1": 0.38116505872483314, "f1_stderr": 0.004403649120675284, "acc": 0.3936315988829403, "acc_stderr": 0.0083541228301978 }, "harness|drop|3": { "em": 0.2752726510067114, "em_stderr": 0.0045741300617909856, "f1": 0.38116505872483314, "f1_stderr": 0.004403649120675284 }, "harness|gsm8k|5": { "acc": 0.029567854435178165, "acc_stderr": 0.004665893134220814 }, "harness|winogrande|5": { "acc": 0.7576953433307024, "acc_stderr": 0.012042352526174785 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
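To pull the aggregated numbers rather than the per-task details, here is a minimal sketch along the same lines, assuming the "results" configuration and the "latest" split described above:

```python
from datasets import load_dataset

# Aggregated metrics for every run; the "latest" split always points to the
# most recent evaluation (here 2023-10-28T16:53:41.047162).
results = load_dataset("open-llm-leaderboard/details_PulsarAI__Chat-AYB-Platypus2-13B",
                       "results",
                       split="latest")
print(results[0])
```

Each timestamped split holds the aggregated scores of one run, mirroring the per-task configurations.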
open-llm-leaderboard/details_PulsarAI__Chat-AYB-Platypus2-13B
[ "region:us" ]
2023-10-08T13:46:28+00:00
{"pretty_name": "Evaluation run of PulsarAI/Chat-AYB-Platypus2-13B", "dataset_summary": "Dataset automatically created during the evaluation run of model [PulsarAI/Chat-AYB-Platypus2-13B](https://huggingface.co/PulsarAI/Chat-AYB-Platypus2-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_PulsarAI__Chat-AYB-Platypus2-13B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-28T16:53:41.047162](https://huggingface.co/datasets/open-llm-leaderboard/details_PulsarAI__Chat-AYB-Platypus2-13B/blob/main/results_2023-10-28T16-53-41.047162.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.2752726510067114,\n \"em_stderr\": 0.0045741300617909856,\n \"f1\": 0.38116505872483314,\n \"f1_stderr\": 0.004403649120675284,\n \"acc\": 0.3936315988829403,\n \"acc_stderr\": 0.0083541228301978\n },\n \"harness|drop|3\": {\n \"em\": 0.2752726510067114,\n \"em_stderr\": 0.0045741300617909856,\n \"f1\": 0.38116505872483314,\n \"f1_stderr\": 0.004403649120675284\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.029567854435178165,\n \"acc_stderr\": 0.004665893134220814\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7576953433307024,\n \"acc_stderr\": 0.012042352526174785\n }\n}\n```", "repo_url": "https://huggingface.co/PulsarAI/Chat-AYB-Platypus2-13B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|arc:challenge|25_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_28T16_53_41.047162", "path": ["**/details_harness|drop|3_2023-10-28T16-53-41.047162.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-28T16-53-41.047162.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_28T16_53_41.047162", "path": ["**/details_harness|gsm8k|5_2023-10-28T16-53-41.047162.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-28T16-53-41.047162.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hellaswag|10_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T14-46-05.202813.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T14-46-05.202813.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T14-46-05.202813.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T14-46-05.202813.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T14-46-05.202813.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_28T16_53_41.047162", "path": ["**/details_harness|winogrande|5_2023-10-28T16-53-41.047162.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-28T16-53-41.047162.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_08T14_46_05.202813", "path": ["results_2023-10-08T14-46-05.202813.parquet"]}, {"split": "2023_10_28T16_53_41.047162", "path": ["results_2023-10-28T16-53-41.047162.parquet"]}, {"split": "latest", "path": ["results_2023-10-28T16-53-41.047162.parquet"]}]}]}
2023-10-28T15:53:53+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of PulsarAI/Chat-AYB-Platypus2-13B ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model PulsarAI/Chat-AYB-Platypus2-13B on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-28T16:53:41.047162 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and in the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
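The card text above references a load example ("you can for instance do the following:") whose code block was stripped when the text was flattened. Below is a minimal sketch of what that call looks like for this entry. The repository id is inferred from the `open-llm-leaderboard/details_<org>__<model>` naming pattern used by the other card in this dump, not quoted from this record; the configuration and split names are taken from this record's metadata above.

```python
from datasets import load_dataset

# Repo id follows the leaderboard's naming pattern (an assumption, not
# quoted from this record); config and split names come from the
# metadata listed for this entry.
data = load_dataset(
    "open-llm-leaderboard/details_PulsarAI__Chat-AYB-Platypus2-13B",
    "harness_winogrande_5",
    split="latest",  # or the timestamped split "2023_10_28T16_53_41.047162"
)
```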
[ "# Dataset Card for Evaluation run of PulsarAI/Chat-AYB-Platypus2-13B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model PulsarAI/Chat-AYB-Platypus2-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-28T16:53:41.047162(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of PulsarAI/Chat-AYB-Platypus2-13B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model PulsarAI/Chat-AYB-Platypus2-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-28T16:53:41.047162(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 24, 31, 172, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of PulsarAI/Chat-AYB-Platypus2-13B## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model PulsarAI/Chat-AYB-Platypus2-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-28T16:53:41.047162(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
35b6b7248dadbb8d8fc4adc077f8662458f99312
# Dataset Card for Evaluation run of PulsarAI/2x-LoRA-Assemble-Nova-13B

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/PulsarAI/2x-LoRA-Assemble-Nova-13B
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [PulsarAI/2x-LoRA-Assemble-Nova-13B](https://huggingface.co/PulsarAI/2x-LoRA-Assemble-Nova-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_PulsarAI__2x-LoRA-Assemble-Nova-13B",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-26T09:15:27.308196](https://huggingface.co/datasets/open-llm-leaderboard/details_PulsarAI__2x-LoRA-Assemble-Nova-13B/blob/main/results_2023-10-26T09-15-27.308196.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and in the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.005243288590604027,
        "em_stderr": 0.0007396052260778,
        "f1": 0.08796455536912774,
        "f1_stderr": 0.0018271669211415338,
        "acc": 0.4359422992113922,
        "acc_stderr": 0.010092491580522747
    },
    "harness|drop|3": {
        "em": 0.005243288590604027,
        "em_stderr": 0.0007396052260778,
        "f1": 0.08796455536912774,
        "f1_stderr": 0.0018271669211415338
    },
    "harness|gsm8k|5": {
        "acc": 0.1023502653525398,
        "acc_stderr": 0.008349110996208824
    },
    "harness|winogrande|5": {
        "acc": 0.7695343330702447,
        "acc_stderr": 0.01183587216483667
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
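The example in the card loads a single task configuration. To read the aggregated scores instead, the "results" configuration listed in this record's metadata below can be loaded the same way. A small sketch, assuming the standard `datasets` API:

```python
from datasets import load_dataset

# The "results" configuration stores the aggregated metrics; its
# "latest" split points at the most recent evaluation run.
results = load_dataset(
    "open-llm-leaderboard/details_PulsarAI__2x-LoRA-Assemble-Nova-13B",
    "results",
    split="latest",
)
print(results[0])  # e.g. the drop/gsm8k/winogrande numbers shown under "Latest results"
```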
open-llm-leaderboard/details_PulsarAI__2x-LoRA-Assemble-Nova-13B
[ "region:us" ]
2023-10-08T13:51:33+00:00
{"pretty_name": "Evaluation run of PulsarAI/2x-LoRA-Assemble-Nova-13B", "dataset_summary": "Dataset automatically created during the evaluation run of model [PulsarAI/2x-LoRA-Assemble-Nova-13B](https://huggingface.co/PulsarAI/2x-LoRA-Assemble-Nova-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_PulsarAI__2x-LoRA-Assemble-Nova-13B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-26T09:15:27.308196](https://huggingface.co/datasets/open-llm-leaderboard/details_PulsarAI__2x-LoRA-Assemble-Nova-13B/blob/main/results_2023-10-26T09-15-27.308196.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.005243288590604027,\n \"em_stderr\": 0.0007396052260778,\n \"f1\": 0.08796455536912774,\n \"f1_stderr\": 0.0018271669211415338,\n \"acc\": 0.4359422992113922,\n \"acc_stderr\": 0.010092491580522747\n },\n \"harness|drop|3\": {\n \"em\": 0.005243288590604027,\n \"em_stderr\": 0.0007396052260778,\n \"f1\": 0.08796455536912774,\n \"f1_stderr\": 0.0018271669211415338\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.1023502653525398,\n \"acc_stderr\": 0.008349110996208824\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7695343330702447,\n \"acc_stderr\": 0.01183587216483667\n }\n}\n```", "repo_url": "https://huggingface.co/PulsarAI/2x-LoRA-Assemble-Nova-13B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|arc:challenge|25_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_26T09_15_27.308196", "path": ["**/details_harness|drop|3_2023-10-26T09-15-27.308196.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-26T09-15-27.308196.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_26T09_15_27.308196", "path": ["**/details_harness|gsm8k|5_2023-10-26T09-15-27.308196.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-26T09-15-27.308196.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hellaswag|10_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T14-51-09.823341.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T14-51-09.823341.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T14-51-09.823341.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T14-51-09.823341.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T14-51-09.823341.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_26T09_15_27.308196", "path": ["**/details_harness|winogrande|5_2023-10-26T09-15-27.308196.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-26T09-15-27.308196.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_08T14_51_09.823341", "path": ["results_2023-10-08T14-51-09.823341.parquet"]}, {"split": "2023_10_26T09_15_27.308196", "path": ["results_2023-10-26T09-15-27.308196.parquet"]}, {"split": "latest", "path": ["results_2023-10-26T09-15-27.308196.parquet"]}]}]}
2023-10-26T08:15:40+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of PulsarAI/2x-LoRA-Assemble-Nova-13B ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model PulsarAI/2x-LoRA-Assemble-Nova-13B on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-26T09:15:27.308196 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
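The snippet that "do the following:" refers to was stripped out of this processed text field. A minimal sketch of the intended call, assuming this record follows the same `open-llm-leaderboard/details_<org>__<model>` repo-naming pattern as the neighbouring records (the repo id below is inferred, not quoted from this record):

```python
from datasets import load_dataset

# Repo id is an assumption based on the sibling records' naming pattern;
# "harness_winogrande_5" is one of the configs declared in the metadata above.
data = load_dataset(
    "open-llm-leaderboard/details_PulsarAI__2x-LoRA-Assemble-Nova-13B",
    "harness_winogrande_5",
    split="train",
)
```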
[ "# Dataset Card for Evaluation run of PulsarAI/2x-LoRA-Assemble-Nova-13B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model PulsarAI/2x-LoRA-Assemble-Nova-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-26T09:15:27.308196(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of PulsarAI/2x-LoRA-Assemble-Nova-13B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model PulsarAI/2x-LoRA-Assemble-Nova-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-26T09:15:27.308196(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 26, 31, 174, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of PulsarAI/2x-LoRA-Assemble-Nova-13B## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model PulsarAI/2x-LoRA-Assemble-Nova-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-26T09:15:27.308196(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
e922135e8d9833ed773e98231078bd501053e005
# Dataset Card for Evaluation run of PulsarAI/2x-LoRA-Assemble-Platypus2-13B ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/PulsarAI/2x-LoRA-Assemble-Platypus2-13B - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [PulsarAI/2x-LoRA-Assemble-Platypus2-13B](https://huggingface.co/PulsarAI/2x-LoRA-Assemble-Platypus2-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_PulsarAI__2x-LoRA-Assemble-Platypus2-13B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-26T04:49:09.510505](https://huggingface.co/datasets/open-llm-leaderboard/details_PulsarAI__2x-LoRA-Assemble-Platypus2-13B/blob/main/results_2023-10-26T04-49-09.510505.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.14020553691275167, "em_stderr": 0.003555654511760366, "f1": 0.25958473154362444, "f1_stderr": 0.003697673494004961, "acc": 0.3790556094431875, "acc_stderr": 0.007400551365645916 }, "harness|drop|3": { "em": 0.14020553691275167, "em_stderr": 0.003555654511760366, "f1": 0.25958473154362444, "f1_stderr": 0.003697673494004961 }, "harness|gsm8k|5": { "acc": 0.009097801364670205, "acc_stderr": 0.002615326510775672 }, "harness|winogrande|5": { "acc": 0.7490134175217048, "acc_stderr": 0.012185776220516161 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
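The "Latest results" paragraph above points readers at the "results" configuration; that config and its "latest" split are declared in this record's metadata below. A minimal sketch of pulling the aggregated metrics that way — the one-row layout of the aggregate table is an assumption based on the summary JSON shown above:

```python
from datasets import load_dataset

# The "results" config and its "latest" split come straight from the configs
# list in this record's metadata; "latest" resolves to the newest run's parquet.
results = load_dataset(
    "open-llm-leaderboard/details_PulsarAI__2x-LoRA-Assemble-Platypus2-13B",
    "results",
    split="latest",
)
print(results[0])  # assumed: one row holding the aggregated metrics
```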
open-llm-leaderboard/details_PulsarAI__2x-LoRA-Assemble-Platypus2-13B
[ "region:us" ]
2023-10-08T13:58:57+00:00
{"pretty_name": "Evaluation run of PulsarAI/2x-LoRA-Assemble-Platypus2-13B", "dataset_summary": "Dataset automatically created during the evaluation run of model [PulsarAI/2x-LoRA-Assemble-Platypus2-13B](https://huggingface.co/PulsarAI/2x-LoRA-Assemble-Platypus2-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_PulsarAI__2x-LoRA-Assemble-Platypus2-13B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-26T04:49:09.510505](https://huggingface.co/datasets/open-llm-leaderboard/details_PulsarAI__2x-LoRA-Assemble-Platypus2-13B/blob/main/results_2023-10-26T04-49-09.510505.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.14020553691275167,\n \"em_stderr\": 0.003555654511760366,\n \"f1\": 0.25958473154362444,\n \"f1_stderr\": 0.003697673494004961,\n \"acc\": 0.3790556094431875,\n \"acc_stderr\": 0.007400551365645916\n },\n \"harness|drop|3\": {\n \"em\": 0.14020553691275167,\n \"em_stderr\": 0.003555654511760366,\n \"f1\": 0.25958473154362444,\n \"f1_stderr\": 0.003697673494004961\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.009097801364670205,\n \"acc_stderr\": 0.002615326510775672\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7490134175217048,\n \"acc_stderr\": 0.012185776220516161\n }\n}\n```", "repo_url": "https://huggingface.co/PulsarAI/2x-LoRA-Assemble-Platypus2-13B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|arc:challenge|25_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_26T04_49_09.510505", "path": ["**/details_harness|drop|3_2023-10-26T04-49-09.510505.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-26T04-49-09.510505.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_26T04_49_09.510505", "path": ["**/details_harness|gsm8k|5_2023-10-26T04-49-09.510505.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-26T04-49-09.510505.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hellaswag|10_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hellaswag|10_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T14-58-33.553023.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T14-58-33.553023.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T14-58-33.553023.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T14-58-33.553023.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": 
"2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T14-58-33.553023.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T14-58-33.553023.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_26T04_49_09.510505", "path": ["**/details_harness|winogrande|5_2023-10-26T04-49-09.510505.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-26T04-49-09.510505.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_08T14_58_33.553023", "path": ["results_2023-10-08T14-58-33.553023.parquet"]}, {"split": "2023_10_26T04_49_09.510505", "path": ["results_2023-10-26T04-49-09.510505.parquet"]}, {"split": "latest", "path": ["results_2023-10-26T04-49-09.510505.parquet"]}]}]}
2023-10-26T03:49:23+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of PulsarAI/2x-LoRA-Assemble-Platypus2-13B ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model PulsarAI/2x-LoRA-Assemble-Platypus2-13B on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-26T04:49:09.510505 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
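As above, the code block was stripped from this processed field; the full text field of this record preserves it verbatim, reproduced here for convenience:

```python
from datasets import load_dataset

# Loads the per-example details of the winogrande eval for this model.
data = load_dataset(
    "open-llm-leaderboard/details_PulsarAI__2x-LoRA-Assemble-Platypus2-13B",
    "harness_winogrande_5",
    split="train",
)
```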
[ "# Dataset Card for Evaluation run of PulsarAI/2x-LoRA-Assemble-Platypus2-13B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model PulsarAI/2x-LoRA-Assemble-Platypus2-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-26T04:49:09.510505(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of PulsarAI/2x-LoRA-Assemble-Platypus2-13B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model PulsarAI/2x-LoRA-Assemble-Platypus2-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-26T04:49:09.510505(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 28, 31, 176, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of PulsarAI/2x-LoRA-Assemble-Platypus2-13B## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model PulsarAI/2x-LoRA-Assemble-Platypus2-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-26T04:49:09.510505(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
4ec5db6594e1257c3dca40265379a613f78cbb8f
# Dataset Card for Evaluation run of PulsarAI/GenAI-Nova-13B ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/PulsarAI/GenAI-Nova-13B - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [PulsarAI/GenAI-Nova-13B](https://huggingface.co/PulsarAI/GenAI-Nova-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_PulsarAI__GenAI-Nova-13B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-29T14:58:59.300779](https://huggingface.co/datasets/open-llm-leaderboard/details_PulsarAI__GenAI-Nova-13B/blob/main/results_2023-10-29T14-58-59.300779.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each of them in the results and in the "latest" split for each eval): ```python { "all": { "em": 0.10769714765100671, "em_stderr": 0.003174664916131534, "f1": 0.18815016778523358, "f1_stderr": 0.0033317211011039192, "acc": 0.4254059872915611, "acc_stderr": 0.009560931288960338 }, "harness|drop|3": { "em": 0.10769714765100671, "em_stderr": 0.003174664916131534, "f1": 0.18815016778523358, "f1_stderr": 0.0033317211011039192 }, "harness|gsm8k|5": { "acc": 0.07733131159969674, "acc_stderr": 0.007357713523222347 }, "harness|winogrande|5": { "acc": 0.7734806629834254, "acc_stderr": 0.01176414905469833 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
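Beyond the per-task details shown above, the aggregated metrics are reachable through the "results" configuration declared in this repo's metadata, whose "latest" split points at the most recent run. A minimal sketch follows; the exact column layout inside "results" depends on the harness version, so treat the printed schema as informative rather than fixed:

```python
from datasets import load_dataset

# The "results" config and its "latest" split are declared in this repo's
# metadata; the row layout inside them is harness-dependent.
results = load_dataset(
    "open-llm-leaderboard/details_PulsarAI__GenAI-Nova-13B",
    "results",
    split="latest",
)
print(results)      # schema and row count
print(results[0])   # aggregated metrics for the latest run
```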
open-llm-leaderboard/details_PulsarAI__GenAI-Nova-13B
[ "region:us" ]
2023-10-08T14:05:43+00:00
{"pretty_name": "Evaluation run of PulsarAI/GenAI-Nova-13B", "dataset_summary": "Dataset automatically created during the evaluation run of model [PulsarAI/GenAI-Nova-13B](https://huggingface.co/PulsarAI/GenAI-Nova-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_PulsarAI__GenAI-Nova-13B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-29T14:58:59.300779](https://huggingface.co/datasets/open-llm-leaderboard/details_PulsarAI__GenAI-Nova-13B/blob/main/results_2023-10-29T14-58-59.300779.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.10769714765100671,\n \"em_stderr\": 0.003174664916131534,\n \"f1\": 0.18815016778523358,\n \"f1_stderr\": 0.0033317211011039192,\n \"acc\": 0.4254059872915611,\n \"acc_stderr\": 0.009560931288960338\n },\n \"harness|drop|3\": {\n \"em\": 0.10769714765100671,\n \"em_stderr\": 0.003174664916131534,\n \"f1\": 0.18815016778523358,\n \"f1_stderr\": 0.0033317211011039192\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.07733131159969674,\n \"acc_stderr\": 0.007357713523222347\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7734806629834254,\n \"acc_stderr\": 0.01176414905469833\n }\n}\n```", "repo_url": "https://huggingface.co/PulsarAI/GenAI-Nova-13B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|arc:challenge|25_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_29T14_58_59.300779", "path": ["**/details_harness|drop|3_2023-10-29T14-58-59.300779.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-29T14-58-59.300779.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_29T14_58_59.300779", "path": ["**/details_harness|gsm8k|5_2023-10-29T14-58-59.300779.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-29T14-58-59.300779.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hellaswag|10_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": 
"2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T15-05-19.512883.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T15-05-19.512883.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T15-05-19.512883.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T15-05-19.512883.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T15-05-19.512883.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_29T14_58_59.300779", "path": ["**/details_harness|winogrande|5_2023-10-29T14-58-59.300779.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-29T14-58-59.300779.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_08T15_05_19.512883", "path": ["results_2023-10-08T15-05-19.512883.parquet"]}, {"split": "2023_10_29T14_58_59.300779", "path": ["results_2023-10-29T14-58-59.300779.parquet"]}, {"split": "latest", "path": ["results_2023-10-29T14-58-59.300779.parquet"]}]}]}
2023-10-29T14:59:11+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of PulsarAI/GenAI-Nova-13B ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model PulsarAI/GenAI-Nova-13B on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-29T14:58:59.300779 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each of them in the results and in the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of PulsarAI/GenAI-Nova-13B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model PulsarAI/GenAI-Nova-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-29T14:58:59.300779(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of PulsarAI/GenAI-Nova-13B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model PulsarAI/GenAI-Nova-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-29T14:58:59.300779(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 20, 31, 168, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of PulsarAI/GenAI-Nova-13B## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model PulsarAI/GenAI-Nova-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-29T14:58:59.300779(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
576daddd3566fe4c62d2844e7188c2a0e29643dd
# Dataset Card for "audio_dataset1" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
hsali/audio_dataset1
[ "region:us" ]
2023-10-08T14:09:06+00:00
{"dataset_info": {"features": [{"name": "audio", "dtype": "audio"}, {"name": "file_id", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 439185.0, "num_examples": 5}], "download_size": 427010, "dataset_size": 439185.0}}
2023-10-08T14:09:15+00:00
[]
[]
TAGS #region-us
# Dataset Card for "audio_dataset1" More Information needed
[ "# Dataset Card for \"audio_dataset1\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"audio_dataset1\"\n\nMore Information needed" ]
[ 6, 16 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"audio_dataset1\"\n\nMore Information needed" ]
54dd1b7d21eee93191bc9dfcbc71d57ccdc64b08
# Dataset Card for "embeddings_from_distilbert_masking_heaps_and_eval_part0_test" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
johannes-garstenauer/embeddings_from_distilbert_masking_heaps_and_eval_part0_test
[ "region:us" ]
2023-10-08T14:31:41+00:00
{"dataset_info": {"features": [{"name": "struct", "dtype": "string"}, {"name": "label", "dtype": "int64"}, {"name": "pred", "dtype": "int64"}, {"name": "cls_layer_6", "sequence": "float32"}, {"name": "cls_layer_5", "sequence": "float32"}, {"name": "cls_layer_4", "sequence": "float32"}], "splits": [{"name": "train", "num_bytes": 13428556, "num_examples": 1408}], "download_size": 16660183, "dataset_size": 13428556}}
2023-10-08T14:31:50+00:00
[]
[]
TAGS #region-us
# Dataset Card for "embeddings_from_distilbert_masking_heaps_and_eval_part0_test" More Information needed
[ "# Dataset Card for \"embeddings_from_distilbert_masking_heaps_and_eval_part0_test\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"embeddings_from_distilbert_masking_heaps_and_eval_part0_test\"\n\nMore Information needed" ]
[ 6, 35 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"embeddings_from_distilbert_masking_heaps_and_eval_part0_test\"\n\nMore Information needed" ]
cf4d7aa807d1d60c24926858f541b665caeb718a
# Dataset Card for "embeddings_from_distilbert_masking_heaps_and_eval_part1_test" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
johannes-garstenauer/embeddings_from_distilbert_masking_heaps_and_eval_part1_test
[ "region:us" ]
2023-10-08T14:32:01+00:00
{"dataset_info": {"features": [{"name": "struct", "dtype": "string"}, {"name": "label", "dtype": "int64"}, {"name": "pred", "dtype": "int64"}, {"name": "cls_layer_6", "sequence": "float32"}, {"name": "cls_layer_5", "sequence": "float32"}, {"name": "cls_layer_4", "sequence": "float32"}], "splits": [{"name": "train", "num_bytes": 12230881, "num_examples": 1283}], "download_size": 14962458, "dataset_size": 12230881}}
2023-10-08T14:32:10+00:00
[]
[]
TAGS #region-us
# Dataset Card for "embeddings_from_distilbert_masking_heaps_and_eval_part1_test" More Information needed
[ "# Dataset Card for \"embeddings_from_distilbert_masking_heaps_and_eval_part1_test\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"embeddings_from_distilbert_masking_heaps_and_eval_part1_test\"\n\nMore Information needed" ]
[ 6, 35 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"embeddings_from_distilbert_masking_heaps_and_eval_part1_test\"\n\nMore Information needed" ]
37fe27b8d8b914e69039ef3baea9783ba699fb38
# Dataset Card for "jquiros-suicide-es" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Intuit-GenSRF/jquiros-suicide-es
[ "region:us" ]
2023-10-08T14:39:09+00:00
{"dataset_info": {"features": [{"name": "text", "dtype": "string"}, {"name": "labels", "sequence": "string"}, {"name": "processed_text", "sequence": "string"}, {"name": "num_tokens", "dtype": "int64"}, {"name": "text_es", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 434028422, "num_examples": 230832}], "download_size": 266158998, "dataset_size": 434028422}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-08T14:39:21+00:00
[]
[]
TAGS #region-us
# Dataset Card for "jquiros-suicide-es" More Information needed
[ "# Dataset Card for \"jquiros-suicide-es\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"jquiros-suicide-es\"\n\nMore Information needed" ]
[ 6, 19 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"jquiros-suicide-es\"\n\nMore Information needed" ]
c19291cbe07769f11905b4ce5c9c7e0988b18aed
# Dataset Card for "tune-forms" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Dmkond/tune-forms
[ "region:us" ]
2023-10-08T14:44:22+00:00
{"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 842248, "num_examples": 200}], "download_size": 221015, "dataset_size": 842248}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-08T14:44:24+00:00
[]
[]
TAGS #region-us
# Dataset Card for "tune-forms" More Information needed
[ "# Dataset Card for \"tune-forms\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"tune-forms\"\n\nMore Information needed" ]
[ 6, 14 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"tune-forms\"\n\nMore Information needed" ]
798af31f81eb35f5ca2413f85ce6f3b1986145a8
# Dataset Card for Dataset Name

CSV file of embedded abstracts collected from https://lfs.aminer.cn/lab-datasets/citation/citation-network1.zip

Columns are title,authors,year,venue,index,abstract,embedding,references in that order.
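A minimal loading sketch. The filename, the absence of a header row, and the embedding column being serialized as a Python-style list string are all assumptions about the file, not facts stated on this card; adjust after inspecting the actual CSV:

```python
import ast
import pandas as pd

# "embedding_network.csv" is a hypothetical filename; names= mirrors the
# column order stated above and assumes the CSV ships without a header row.
cols = ["title", "authors", "year", "venue", "index",
        "abstract", "embedding", "references"]
df = pd.read_csv("embedding_network.csv", names=cols)

# Assumed format: embeddings stored as strings like "[0.1, 0.2, ...]".
df["embedding"] = df["embedding"].apply(ast.literal_eval)
print(df[["title", "year"]].head())
```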
ppxscal/embedding_network
[ "license:mit", "region:us" ]
2023-10-08T14:48:47+00:00
{"license": "mit"}
2023-10-18T15:34:31+00:00
[]
[]
TAGS #license-mit #region-us
# Dataset Card for Dataset Name CSV file of embedded abstracts collected from URL Columns are title,authors,year,venue,index,abstract,embedding,references in that order.
[ "# Dataset Card for Dataset Name\n\nCSV file of embedded abtracts collected from URL\n\nColumns are title,authors,year,venue,index,abstract,embedding,references in that order." ]
[ "TAGS\n#license-mit #region-us \n", "# Dataset Card for Dataset Name\n\nCSV file of embedded abtracts collected from URL\n\nColumns are title,authors,year,venue,index,abstract,embedding,references in that order." ]
[ 11, 50 ]
[ "passage: TAGS\n#license-mit #region-us \n# Dataset Card for Dataset Name\n\nCSV file of embedded abtracts collected from URL\n\nColumns are title,authors,year,venue,index,abstract,embedding,references in that order." ]
969a35845600591330d2d838755c578ee57c1edd
# Dataset Card for "Face-Aging-Dataset" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
BounharAbdelaziz/Face-Aging-Dataset
[ "region:us" ]
2023-10-08T14:52:09+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "age_domain_20_35", "1": "age_domain_36_60", "2": "age_domain_60_90"}}}}], "splits": [{"name": "train", "num_bytes": 16136815235.988, "num_examples": 40252}], "download_size": 16202626214, "dataset_size": 16136815235.988}}
2023-10-08T15:16:52+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Face-Aging-Dataset" More Information needed
[ "# Dataset Card for \"Face-Aging-Dataset\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Face-Aging-Dataset\"\n\nMore Information needed" ]
[ 6, 18 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"Face-Aging-Dataset\"\n\nMore Information needed" ]
6db1db44dbcb7b85ea6dd0e4f139251ffa2bf33d
# Dataset Card for "Face-Gender-Swap" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
BounharAbdelaziz/Face-Gender-Swap
[ "region:us" ]
2023-10-08T14:53:22+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "domain_F", "1": "domain_M"}}}}], "splits": [{"name": "train", "num_bytes": 20710300480.468, "num_examples": 51604}], "download_size": 20737281406, "dataset_size": 20710300480.468}}
2023-10-08T15:25:03+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Face-Gender-Swap" More Information needed
[ "# Dataset Card for \"Face-Gender-Swap\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Face-Gender-Swap\"\n\nMore Information needed" ]
[ 6, 19 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"Face-Gender-Swap\"\n\nMore Information needed" ]
54586ffbb8883e71fccef0c9f91f974457f44d25
This dataset contains a slice of 200 samples from the [AG News](https://huggingface.co/datasets/ag_news) dataset (test split). The picked 200 samples are potential misclassifications of the original test data.

Approach:

* Fine-tune DistilBERT with 10k samples from the training data (out of 120k)
* Do a forward pass with the model, storing the loss
* Sort the samples based on the loss

This is a repository for demonstration purposes; a minimal sketch of the approach follows below.
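The sketch covers the forward-pass and loss-sorting steps described above. "my-distilbert-ag-news" is a hypothetical checkpoint name standing in for the DistilBERT model fine-tuned on 10k AG News training samples; the fine-tuning itself is omitted:

```python
import numpy as np
import torch
from datasets import load_dataset
from transformers import AutoModelForSequenceClassification, AutoTokenizer

ckpt = "my-distilbert-ag-news"  # hypothetical fine-tuned checkpoint
tokenizer = AutoTokenizer.from_pretrained(ckpt)
model = AutoModelForSequenceClassification.from_pretrained(ckpt, num_labels=4)
model.eval()

test = load_dataset("ag_news", split="test")
loss_fn = torch.nn.CrossEntropyLoss(reduction="none")
losses = []
with torch.no_grad():
    for i in range(0, len(test), 32):
        batch = test[i : i + 32]
        enc = tokenizer(batch["text"], truncation=True, padding=True,
                        return_tensors="pt")
        logits = model(**enc).logits
        losses.extend(loss_fn(logits, torch.tensor(batch["label"])).tolist())

# Highest-loss samples are the most likely misclassifications.
top200 = np.argsort(losses)[::-1][:200]
candidates = test.select(top200.tolist())
```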
osanseviero/ag_misclassifications
[ "region:us" ]
2023-10-08T14:54:11+00:00
{}
2023-10-08T14:57:20+00:00
[]
[]
TAGS #region-us
This dataset contains a slice of 200 samples from the AG News dataset (test split). The 200 picked samples are potential misclassifications of the original test data. Approach: * Fine-tune DistilBERT with 10k samples from the training data (out of 120k) * Do a forward pass with the model, storing the loss * Sort the samples based on the loss This is a repository for demonstration purposes.
[]
[ "TAGS\n#region-us \n" ]
[ 6 ]
[ "passage: TAGS\n#region-us \n" ]
c92e2e615802a78de7720a8abef7abac35e2ec76
# Dataset Card for "wiki-movie-plots-with-summaries-faiss-embeddings" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
vishnupriyavr/wiki-movie-plots-with-summaries-faiss-embeddings
[ "region:us" ]
2023-10-08T15:02:41+00:00
{"dataset_info": {"features": [{"name": "Release Year", "dtype": "int64"}, {"name": "Title", "dtype": "string"}, {"name": "Cast", "dtype": "string"}, {"name": "Wiki Page", "dtype": "string"}, {"name": "Plot", "dtype": "string"}, {"name": "plot_length", "dtype": "int64"}, {"name": "text", "dtype": "string"}, {"name": "embeddings", "sequence": "float32"}], "splits": [{"name": "train", "num_bytes": 256974740, "num_examples": 33155}], "download_size": 216835238, "dataset_size": 256974740}}
2023-10-08T15:02:50+00:00
[]
[]
TAGS #region-us
# Dataset Card for "wiki-movie-plots-with-summaries-faiss-embeddings" More Information needed
[ "# Dataset Card for \"wiki-movie-plots-with-summaries-faiss-embeddings\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"wiki-movie-plots-with-summaries-faiss-embeddings\"\n\nMore Information needed" ]
[ 6, 29 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"wiki-movie-plots-with-summaries-faiss-embeddings\"\n\nMore Information needed" ]
ecb4d3dea8354e565c7a54dafe8a4c6eae673da7
# Bangumi Image Base of Rurouni Kenshin (2023) This is the image base of bangumi Rurouni Kenshin (2023), we detected 38 characters, 4087 images in total. The full dataset is [here](all.zip). **Please note that these image bases are not guaranteed to be 100% cleaned, they may be noisy actual.** If you intend to manually train models using this dataset, we recommend performing necessary preprocessing on the downloaded dataset to eliminate potential noisy samples (approximately 1% probability). Here is the characters' preview: | # | Images | Download | Preview 1 | Preview 2 | Preview 3 | Preview 4 | Preview 5 | Preview 6 | Preview 7 | Preview 8 | |:------|---------:|:---------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------| | 0 | 61 | [Download](0/dataset.zip) | ![preview 1](0/preview_1.png) | ![preview 2](0/preview_2.png) | ![preview 3](0/preview_3.png) | ![preview 4](0/preview_4.png) | ![preview 5](0/preview_5.png) | ![preview 6](0/preview_6.png) | ![preview 7](0/preview_7.png) | ![preview 8](0/preview_8.png) | | 1 | 267 | [Download](1/dataset.zip) | ![preview 1](1/preview_1.png) | ![preview 2](1/preview_2.png) | ![preview 3](1/preview_3.png) | ![preview 4](1/preview_4.png) | ![preview 5](1/preview_5.png) | ![preview 6](1/preview_6.png) | ![preview 7](1/preview_7.png) | ![preview 8](1/preview_8.png) | | 2 | 106 | [Download](2/dataset.zip) | ![preview 1](2/preview_1.png) | ![preview 2](2/preview_2.png) | ![preview 3](2/preview_3.png) | ![preview 4](2/preview_4.png) | ![preview 5](2/preview_5.png) | ![preview 6](2/preview_6.png) | ![preview 7](2/preview_7.png) | ![preview 8](2/preview_8.png) | | 3 | 35 | [Download](3/dataset.zip) | ![preview 1](3/preview_1.png) | ![preview 2](3/preview_2.png) | ![preview 3](3/preview_3.png) | ![preview 4](3/preview_4.png) | ![preview 5](3/preview_5.png) | ![preview 6](3/preview_6.png) | ![preview 7](3/preview_7.png) | ![preview 8](3/preview_8.png) | | 4 | 19 | [Download](4/dataset.zip) | ![preview 1](4/preview_1.png) | ![preview 2](4/preview_2.png) | ![preview 3](4/preview_3.png) | ![preview 4](4/preview_4.png) | ![preview 5](4/preview_5.png) | ![preview 6](4/preview_6.png) | ![preview 7](4/preview_7.png) | ![preview 8](4/preview_8.png) | | 5 | 82 | [Download](5/dataset.zip) | ![preview 1](5/preview_1.png) | ![preview 2](5/preview_2.png) | ![preview 3](5/preview_3.png) | ![preview 4](5/preview_4.png) | ![preview 5](5/preview_5.png) | ![preview 6](5/preview_6.png) | ![preview 7](5/preview_7.png) | ![preview 8](5/preview_8.png) | | 6 | 117 | [Download](6/dataset.zip) | ![preview 1](6/preview_1.png) | ![preview 2](6/preview_2.png) | ![preview 3](6/preview_3.png) | ![preview 4](6/preview_4.png) | ![preview 5](6/preview_5.png) | ![preview 6](6/preview_6.png) | ![preview 7](6/preview_7.png) | ![preview 8](6/preview_8.png) | | 7 | 435 | [Download](7/dataset.zip) | ![preview 1](7/preview_1.png) | ![preview 2](7/preview_2.png) | ![preview 3](7/preview_3.png) | ![preview 4](7/preview_4.png) | ![preview 5](7/preview_5.png) | ![preview 6](7/preview_6.png) | ![preview 7](7/preview_7.png) | ![preview 8](7/preview_8.png) | | 8 | 29 | [Download](8/dataset.zip) | ![preview 1](8/preview_1.png) | ![preview 2](8/preview_2.png) | ![preview 3](8/preview_3.png) | ![preview 4](8/preview_4.png) | ![preview 5](8/preview_5.png) | ![preview 
6](8/preview_6.png) | ![preview 7](8/preview_7.png) | ![preview 8](8/preview_8.png) | | 9 | 54 | [Download](9/dataset.zip) | ![preview 1](9/preview_1.png) | ![preview 2](9/preview_2.png) | ![preview 3](9/preview_3.png) | ![preview 4](9/preview_4.png) | ![preview 5](9/preview_5.png) | ![preview 6](9/preview_6.png) | ![preview 7](9/preview_7.png) | ![preview 8](9/preview_8.png) | | 10 | 35 | [Download](10/dataset.zip) | ![preview 1](10/preview_1.png) | ![preview 2](10/preview_2.png) | ![preview 3](10/preview_3.png) | ![preview 4](10/preview_4.png) | ![preview 5](10/preview_5.png) | ![preview 6](10/preview_6.png) | ![preview 7](10/preview_7.png) | ![preview 8](10/preview_8.png) | | 11 | 21 | [Download](11/dataset.zip) | ![preview 1](11/preview_1.png) | ![preview 2](11/preview_2.png) | ![preview 3](11/preview_3.png) | ![preview 4](11/preview_4.png) | ![preview 5](11/preview_5.png) | ![preview 6](11/preview_6.png) | ![preview 7](11/preview_7.png) | ![preview 8](11/preview_8.png) | | 12 | 51 | [Download](12/dataset.zip) | ![preview 1](12/preview_1.png) | ![preview 2](12/preview_2.png) | ![preview 3](12/preview_3.png) | ![preview 4](12/preview_4.png) | ![preview 5](12/preview_5.png) | ![preview 6](12/preview_6.png) | ![preview 7](12/preview_7.png) | ![preview 8](12/preview_8.png) | | 13 | 97 | [Download](13/dataset.zip) | ![preview 1](13/preview_1.png) | ![preview 2](13/preview_2.png) | ![preview 3](13/preview_3.png) | ![preview 4](13/preview_4.png) | ![preview 5](13/preview_5.png) | ![preview 6](13/preview_6.png) | ![preview 7](13/preview_7.png) | ![preview 8](13/preview_8.png) | | 14 | 458 | [Download](14/dataset.zip) | ![preview 1](14/preview_1.png) | ![preview 2](14/preview_2.png) | ![preview 3](14/preview_3.png) | ![preview 4](14/preview_4.png) | ![preview 5](14/preview_5.png) | ![preview 6](14/preview_6.png) | ![preview 7](14/preview_7.png) | ![preview 8](14/preview_8.png) | | 15 | 575 | [Download](15/dataset.zip) | ![preview 1](15/preview_1.png) | ![preview 2](15/preview_2.png) | ![preview 3](15/preview_3.png) | ![preview 4](15/preview_4.png) | ![preview 5](15/preview_5.png) | ![preview 6](15/preview_6.png) | ![preview 7](15/preview_7.png) | ![preview 8](15/preview_8.png) | | 16 | 19 | [Download](16/dataset.zip) | ![preview 1](16/preview_1.png) | ![preview 2](16/preview_2.png) | ![preview 3](16/preview_3.png) | ![preview 4](16/preview_4.png) | ![preview 5](16/preview_5.png) | ![preview 6](16/preview_6.png) | ![preview 7](16/preview_7.png) | ![preview 8](16/preview_8.png) | | 17 | 24 | [Download](17/dataset.zip) | ![preview 1](17/preview_1.png) | ![preview 2](17/preview_2.png) | ![preview 3](17/preview_3.png) | ![preview 4](17/preview_4.png) | ![preview 5](17/preview_5.png) | ![preview 6](17/preview_6.png) | ![preview 7](17/preview_7.png) | ![preview 8](17/preview_8.png) | | 18 | 32 | [Download](18/dataset.zip) | ![preview 1](18/preview_1.png) | ![preview 2](18/preview_2.png) | ![preview 3](18/preview_3.png) | ![preview 4](18/preview_4.png) | ![preview 5](18/preview_5.png) | ![preview 6](18/preview_6.png) | ![preview 7](18/preview_7.png) | ![preview 8](18/preview_8.png) | | 19 | 11 | [Download](19/dataset.zip) | ![preview 1](19/preview_1.png) | ![preview 2](19/preview_2.png) | ![preview 3](19/preview_3.png) | ![preview 4](19/preview_4.png) | ![preview 5](19/preview_5.png) | ![preview 6](19/preview_6.png) | ![preview 7](19/preview_7.png) | ![preview 8](19/preview_8.png) | | 20 | 852 | [Download](20/dataset.zip) | ![preview 1](20/preview_1.png) | ![preview 2](20/preview_2.png) | ![preview 
3](20/preview_3.png) | ![preview 4](20/preview_4.png) | ![preview 5](20/preview_5.png) | ![preview 6](20/preview_6.png) | ![preview 7](20/preview_7.png) | ![preview 8](20/preview_8.png) | | 21 | 43 | [Download](21/dataset.zip) | ![preview 1](21/preview_1.png) | ![preview 2](21/preview_2.png) | ![preview 3](21/preview_3.png) | ![preview 4](21/preview_4.png) | ![preview 5](21/preview_5.png) | ![preview 6](21/preview_6.png) | ![preview 7](21/preview_7.png) | ![preview 8](21/preview_8.png) | | 22 | 16 | [Download](22/dataset.zip) | ![preview 1](22/preview_1.png) | ![preview 2](22/preview_2.png) | ![preview 3](22/preview_3.png) | ![preview 4](22/preview_4.png) | ![preview 5](22/preview_5.png) | ![preview 6](22/preview_6.png) | ![preview 7](22/preview_7.png) | ![preview 8](22/preview_8.png) | | 23 | 9 | [Download](23/dataset.zip) | ![preview 1](23/preview_1.png) | ![preview 2](23/preview_2.png) | ![preview 3](23/preview_3.png) | ![preview 4](23/preview_4.png) | ![preview 5](23/preview_5.png) | ![preview 6](23/preview_6.png) | ![preview 7](23/preview_7.png) | ![preview 8](23/preview_8.png) | | 24 | 21 | [Download](24/dataset.zip) | ![preview 1](24/preview_1.png) | ![preview 2](24/preview_2.png) | ![preview 3](24/preview_3.png) | ![preview 4](24/preview_4.png) | ![preview 5](24/preview_5.png) | ![preview 6](24/preview_6.png) | ![preview 7](24/preview_7.png) | ![preview 8](24/preview_8.png) | | 25 | 42 | [Download](25/dataset.zip) | ![preview 1](25/preview_1.png) | ![preview 2](25/preview_2.png) | ![preview 3](25/preview_3.png) | ![preview 4](25/preview_4.png) | ![preview 5](25/preview_5.png) | ![preview 6](25/preview_6.png) | ![preview 7](25/preview_7.png) | ![preview 8](25/preview_8.png) | | 26 | 201 | [Download](26/dataset.zip) | ![preview 1](26/preview_1.png) | ![preview 2](26/preview_2.png) | ![preview 3](26/preview_3.png) | ![preview 4](26/preview_4.png) | ![preview 5](26/preview_5.png) | ![preview 6](26/preview_6.png) | ![preview 7](26/preview_7.png) | ![preview 8](26/preview_8.png) | | 27 | 13 | [Download](27/dataset.zip) | ![preview 1](27/preview_1.png) | ![preview 2](27/preview_2.png) | ![preview 3](27/preview_3.png) | ![preview 4](27/preview_4.png) | ![preview 5](27/preview_5.png) | ![preview 6](27/preview_6.png) | ![preview 7](27/preview_7.png) | ![preview 8](27/preview_8.png) | | 28 | 11 | [Download](28/dataset.zip) | ![preview 1](28/preview_1.png) | ![preview 2](28/preview_2.png) | ![preview 3](28/preview_3.png) | ![preview 4](28/preview_4.png) | ![preview 5](28/preview_5.png) | ![preview 6](28/preview_6.png) | ![preview 7](28/preview_7.png) | ![preview 8](28/preview_8.png) | | 29 | 85 | [Download](29/dataset.zip) | ![preview 1](29/preview_1.png) | ![preview 2](29/preview_2.png) | ![preview 3](29/preview_3.png) | ![preview 4](29/preview_4.png) | ![preview 5](29/preview_5.png) | ![preview 6](29/preview_6.png) | ![preview 7](29/preview_7.png) | ![preview 8](29/preview_8.png) | | 30 | 33 | [Download](30/dataset.zip) | ![preview 1](30/preview_1.png) | ![preview 2](30/preview_2.png) | ![preview 3](30/preview_3.png) | ![preview 4](30/preview_4.png) | ![preview 5](30/preview_5.png) | ![preview 6](30/preview_6.png) | ![preview 7](30/preview_7.png) | ![preview 8](30/preview_8.png) | | 31 | 31 | [Download](31/dataset.zip) | ![preview 1](31/preview_1.png) | ![preview 2](31/preview_2.png) | ![preview 3](31/preview_3.png) | ![preview 4](31/preview_4.png) | ![preview 5](31/preview_5.png) | ![preview 6](31/preview_6.png) | ![preview 7](31/preview_7.png) | ![preview 8](31/preview_8.png) | | 32 | 21 | 
[Download](32/dataset.zip) | ![preview 1](32/preview_1.png) | ![preview 2](32/preview_2.png) | ![preview 3](32/preview_3.png) | ![preview 4](32/preview_4.png) | ![preview 5](32/preview_5.png) | ![preview 6](32/preview_6.png) | ![preview 7](32/preview_7.png) | ![preview 8](32/preview_8.png) | | 33 | 91 | [Download](33/dataset.zip) | ![preview 1](33/preview_1.png) | ![preview 2](33/preview_2.png) | ![preview 3](33/preview_3.png) | ![preview 4](33/preview_4.png) | ![preview 5](33/preview_5.png) | ![preview 6](33/preview_6.png) | ![preview 7](33/preview_7.png) | ![preview 8](33/preview_8.png) | | 34 | 13 | [Download](34/dataset.zip) | ![preview 1](34/preview_1.png) | ![preview 2](34/preview_2.png) | ![preview 3](34/preview_3.png) | ![preview 4](34/preview_4.png) | ![preview 5](34/preview_5.png) | ![preview 6](34/preview_6.png) | ![preview 7](34/preview_7.png) | ![preview 8](34/preview_8.png) | | 35 | 10 | [Download](35/dataset.zip) | ![preview 1](35/preview_1.png) | ![preview 2](35/preview_2.png) | ![preview 3](35/preview_3.png) | ![preview 4](35/preview_4.png) | ![preview 5](35/preview_5.png) | ![preview 6](35/preview_6.png) | ![preview 7](35/preview_7.png) | ![preview 8](35/preview_8.png) | | 36 | 8 | [Download](36/dataset.zip) | ![preview 1](36/preview_1.png) | ![preview 2](36/preview_2.png) | ![preview 3](36/preview_3.png) | ![preview 4](36/preview_4.png) | ![preview 5](36/preview_5.png) | ![preview 6](36/preview_6.png) | ![preview 7](36/preview_7.png) | ![preview 8](36/preview_8.png) | | noise | 59 | [Download](-1/dataset.zip) | ![preview 1](-1/preview_1.png) | ![preview 2](-1/preview_2.png) | ![preview 3](-1/preview_3.png) | ![preview 4](-1/preview_4.png) | ![preview 5](-1/preview_5.png) | ![preview 6](-1/preview_6.png) | ![preview 7](-1/preview_7.png) | ![preview 8](-1/preview_8.png) |
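The per-character Download links above follow a "<cluster>/dataset.zip" pattern inside the repo, so one plausible way to fetch and unpack a single character pack (cluster 7 here, chosen arbitrarily) is via huggingface_hub; the layout inside each archive is an assumption:

```python
import zipfile

from huggingface_hub import hf_hub_download

# Cluster 7 chosen arbitrarily; every row's Download link follows the
# same "<cluster>/dataset.zip" pattern.
archive = hf_hub_download(
    repo_id="BangumiBase/rurounikenshin2023",
    filename="7/dataset.zip",
    repo_type="dataset",
)
with zipfile.ZipFile(archive) as zf:
    zf.extractall("character_7")
# Roughly 1% of images may be noise, so inspect the extracted files
# before training on them.
```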
BangumiBase/rurounikenshin2023
[ "size_categories:1K<n<10K", "license:mit", "art", "region:us" ]
2023-10-08T15:05:16+00:00
{"license": "mit", "size_categories": ["1K<n<10K"], "tags": ["art"]}
2023-11-11T20:14:35+00:00
[]
[]
TAGS #size_categories-1K<n<10K #license-mit #art #region-us
Bangumi Image Base of Rurouni Kenshin (2023) ============================================ This is the image base of bangumi Rurouni Kenshin (2023). We detected 38 characters and 4087 images in total. The full dataset is here. Please note that these image bases are not guaranteed to be 100% cleaned; they may actually be noisy. If you intend to manually train models using this dataset, we recommend performing necessary preprocessing on the downloaded dataset to eliminate potential noisy samples (approximately 1% probability). Here is the characters' preview:
[]
[ "TAGS\n#size_categories-1K<n<10K #license-mit #art #region-us \n" ]
[ 25 ]
[ "passage: TAGS\n#size_categories-1K<n<10K #license-mit #art #region-us \n" ]
f3459b0369f9e40fb29865d9238ad8b14a8039c9
### <span style="color:#3560B0; font-weight: bold;">Python Codes - 30k examples, Llama1&2 tokenized dataset</span> ![License](https://img.shields.io/badge/License-llama2-brightgreen) ![Language](https://img.shields.io/badge/Language-English-blue) ![Size](https://img.shields.io/badge/Size-10M<n<100M-orange) ### <span style="color:#3560B0; font-weight: bold;">Author</span> **<span style="color:#266090;">FlyTech</span>** <span style="color:#3560B0"></br>For general guide on how to create, quantize, merge or inference the model and more, visit:</span> <a href="https://hackmd.io/@swearek/rJYVR_-7a" target="_blank">hackmd.io/my_first_ai</a> ### <span style="color:#3560B0; font-weight: bold;">Overview</span> <span style="color:#266090">This dataset serves as a rich resource for various Natural Language Processing tasks such as:</span> - <span style="color:#E91E63;">Question Answering</span> - <span style="color:#8BC34A;">Text Generation</span> - <span style="color:#FFC107;">Text-to-Text Generation</span> <b><span style="color:#266090">It primarily focuses on instructional tasks in Python, tokenized specifically for the Llama architecture. The dataset is a blend of GPT-4 generated content, custom codes, behavioral approaches and tasks extending beyond Python.</span></b> <hr style="height:1px;border:none;color:#333;background-color:#136;" /> ### <span style="color:#A45356; font-weight: bold;">IMPORTANT!</span> <b><span style="color:#A8A8C9; background-color: #153055"> The llama-python-codes-30k dataset is not cleaned. It has a very low number of unique input entries.</br> For the fully cleaned version of the dataset, detokenized and with filtered-out input entries, please refer to this link: </span></b> <a href="https://huggingface.co/datasets/flytech/python-codes-25k" style="color:#356090">flytech/python-codes-25k</a> <hr style="height:1px;border:none;color:#333;background-color:#136;" /> ### <span style="color:#3560B0; font-weight: bold;">Dataset Metrics</span> **<span style="color:#3560B0;">Token Count (via LlamaTokenizer)</span>** - **<span style="color:#4CAF50;">Maximum</span>: 508** - **<span style="color:#2196F3;">Average</span>: 158.06** - **<span style="color:#F44336;">Total</span>: 13,993,984** **<span style="color:#006688;">Word Count</span>: 1,890,810** **<span style="color:#006688;">Number of Examples</span>: 27,331** ### <b><span style="color:#3560B0; font-weight: bold;">Usage</span></b> ```python from datasets import load_dataset dataset = load_dataset('flytech/llama-python-codes-30k', split='train') # One can map the dataset in any way, for the sake of example: dataset = dataset.map(lambda example: {'text': example['instruction'] + ' ' + example['input'] + ' ' + example['output']})['text'] ``` ### <span style="color:#607D8B; font-weight: bold;">License</span> This dataset is under the `llama2` license. <hr style="height:1px;border:none;color:#333;background-color:#136;" /> ### CONTRIBUTIONS ```python # All contributions to the repository are welcome. # Feel free to use the dataset for the Llama models, # or visit: ``` <a href="https://huggingface.co/datasets/flytech/python-codes-25k" style="color:#356090">flytech/python-codes-25k</a> ```python # To preprocess and tokenize the dataset as per your model requirements! ``` ### <span style="color:#266090; font-weight: bold;">Tags</span> - `code` - `python` - `instruct` - `flytech`
flytech/llama-python-codes-30k
[ "task_categories:question-answering", "task_categories:text-generation", "task_categories:text2text-generation", "size_categories:10M<n<100M", "language:en", "license:llama2", "code", "python", "instruct", "llama", "flytech", "region:us" ]
2023-10-08T15:10:50+00:00
{"language": ["en"], "license": "llama2", "size_categories": ["10M<n<100M"], "task_categories": ["question-answering", "text-generation", "text2text-generation"], "pretty_name": "Llama1/2 Python Codes 30k Tokenized", "author": "FlyTech", "tags": ["code", "python", "instruct", "llama", "flytech"]}
2023-11-05T16:39:12+00:00
[]
[ "en" ]
TAGS #task_categories-question-answering #task_categories-text-generation #task_categories-text2text-generation #size_categories-10M<n<100M #language-English #license-llama2 #code #python #instruct #llama #flytech #region-us
### <span style="color:#3560B0; font-weight: bold;">Python Codes - 30k examples, Llama1&2 tokenized dataset</span> !License !Language !Size ### <span style="color:#3560B0; font-weight: bold;">Author</span> <span style="color:#266090;">FlyTech</span> <span style="color:#3560B0"></br>For general guide on how to create, quantize, merge or inference the model and more, visit:</span> <a href="URL target="_blank">URL ### <span style="color:#3560B0; font-weight: bold;">Overview</span> <span style="color:#266090">This dataset serves as a rich resource for various Natural Language Processing tasks such as:</span> - <span style="color:#E91E63;">Question Answering</span> - <span style="color:#8BC34A;">Text Generation</span> - <span style="color:#FFC107;">Text-to-Text Generation</span> <b><span style="color:#266090">It primarily focuses on instructional tasks in Python, tokenized specifically for the Llama architecture. The dataset is a blend of GPT-4 generated content, custom codes, behavioral approaches and tasks extending beyond Python.</span></b> <hr style="height:1px;border:none;color:#333;background-color:#136;" /> ### <span style="color:#A45356; font-weight: bold;">IMPORTANT!</span> <b><span style="color:#A8A8C9; background-color: #153055"> The llama-python-codes-30k dataset is not cleaned. It has a very low number of unique input entries.</br> For the fully cleaned version of the dataset, detokenized and with filtered-out input entries, please refer to this link: </span></b> <a href="URL style="color:#356090">flytech/python-codes-25k</a> <hr style="height:1px;border:none;color:#333;background-color:#136;" /> ### <span style="color:#3560B0; font-weight: bold;">Dataset Metrics</span> <span style="color:#3560B0;">Token Count (via LlamaTokenizer)</span> - <span style="color:#4CAF50;">Maximum</span>: 508 - <span style="color:#2196F3;">Average</span>: 158.06 - <span style="color:#F44336;">Total</span>: 13,993,984 <span style="color:#006688;">Word Count</span>: 1,890,810 <span style="color:#006688;">Number of Examples</span>: 27,331 ### <b><span style="color:#3560B0; font-weight: bold;">Usage</span></b> ### <span style="color:#607D8B; font-weight: bold;">License</span> This dataset is under the 'llama2' license. <hr style="height:1px;border:none;color:#333;background-color:#136;" /> ### CONTRIBUTIONS <a href="URL style="color:#356090">flytech/python-codes-25k</a> ### <span style="color:#266090; font-weight: bold;">Tags</span> - 'code' - 'python' - 'instruct' - 'flytech'
[ "### <span style=\"color:#3560B0; font-weight: bold;\">Python Codes - 30k examples, Llama1&2 tokenized dataset</span>\n\n!License\n!Language\n!Size", "### <span style=\"color:#3560B0; font-weight: bold;\">Author</span>\n\n<span style=\"color:#266090;\">FlyTech</span>\n<span style=\"color:#3560B0\"></br>For general guide on how to create, quantize, merge or inference the model and more, visit:</span> \n<a href=\"URL target=\"_blank\">URL", "### <span style=\"color:#3560B0; font-weight: bold;\">Overview</span>\n\n<span style=\"color:#266090\">This dataset serves as a rich resource for various Natural Language Processing tasks such as:</span>\n\n- <span style=\"color:#E91E63;\">Question Answering</span>\n- <span style=\"color:#8BC34A;\">Text Generation</span>\n- <span style=\"color:#FFC107;\">Text-to-Text Generation</span>\n\n<b><span style=\"color:#266090\">It primarily focuses on instructional tasks in Python, tokenized specifically for the Llama architecture.\nThe dataset is a blend of GPT-4 generated content, custom codes, behavioral approaches and tasks extending beyond Python.</span></b>\n\n<hr style=\"height:1px;border:none;color:#333;background-color:#136;\" />", "### <span style=\"color:#A45356; font-weight: bold;\">IMPORTANT!</span>\n\n<b><span style=\"color:#A8A8C9; background-color: #153055\">\nThe llama-python-codes-30k dataset is not cleaned. \nIt has a very low number of unique input entries.</br>\nFor the fully cleaned version of the dataset, detokenized and with filtered-out input entries,\nplease refer to this link:\n</span></b>\n\n<a href=\"URL style=\"color:#356090\">flytech/python-codes-25k</a>\n\n\n<hr style=\"height:1px;border:none;color:#333;background-color:#136;\" />", "### <span style=\"color:#3560B0; font-weight: bold;\">Dataset Metrics</span>\n\n<span style=\"color:#3560B0;\">Token Count (via LlamaTokenizer)</span>\n\n- <span style=\"color:#4CAF50;\">Maximum</span>: 508\n- <span style=\"color:#2196F3;\">Average</span>: 158.06\n- <span style=\"color:#F44336;\">Total</span>: 13,993,984\n\n<span style=\"color:#006688;\">Word Count</span>: 1,890,810 \n<span style=\"color:#006688;\">Number of Examples</span>: 27,331", "### <b><span style=\"color:#3560B0; font-weight: bold;\">Usage</span></b>", "### <span style=\"color:#607D8B; font-weight: bold;\">License</span>\n\nThis dataset is under the 'llama2' license.\n\n<hr style=\"height:1px;border:none;color:#333;background-color:#136;\" />", "### CONTRIBUTIONS\n\n\n<a href=\"URL style=\"color:#356090\">flytech/python-codes-25k</a>", "### <span style=\"color:#266090; font-weight: bold;\">Tags</span>\n\n- 'code'\n- 'python'\n- 'instruct'\n- 'flytech'" ]
[ "TAGS\n#task_categories-question-answering #task_categories-text-generation #task_categories-text2text-generation #size_categories-10M<n<100M #language-English #license-llama2 #code #python #instruct #llama #flytech #region-us \n", "### <span style=\"color:#3560B0; font-weight: bold;\">Python Codes - 30k examples, Llama1&2 tokenized dataset</span>\n\n!License\n!Language\n!Size", "### <span style=\"color:#3560B0; font-weight: bold;\">Author</span>\n\n<span style=\"color:#266090;\">FlyTech</span>\n<span style=\"color:#3560B0\"></br>For general guide on how to create, quantize, merge or inference the model and more, visit:</span> \n<a href=\"URL target=\"_blank\">URL", "### <span style=\"color:#3560B0; font-weight: bold;\">Overview</span>\n\n<span style=\"color:#266090\">This dataset serves as a rich resource for various Natural Language Processing tasks such as:</span>\n\n- <span style=\"color:#E91E63;\">Question Answering</span>\n- <span style=\"color:#8BC34A;\">Text Generation</span>\n- <span style=\"color:#FFC107;\">Text-to-Text Generation</span>\n\n<b><span style=\"color:#266090\">It primarily focuses on instructional tasks in Python, tokenized specifically for the Llama architecture.\nThe dataset is a blend of GPT-4 generated content, custom codes, behavioral approaches and tasks extending beyond Python.</span></b>\n\n<hr style=\"height:1px;border:none;color:#333;background-color:#136;\" />", "### <span style=\"color:#A45356; font-weight: bold;\">IMPORTANT!</span>\n\n<b><span style=\"color:#A8A8C9; background-color: #153055\">\nThe llama-python-codes-30k dataset is not cleaned. \nIt has a very low number of unique input entries.</br>\nFor the fully cleaned version of the dataset, detokenized and with filtered-out input entries,\nplease refer to this link:\n</span></b>\n\n<a href=\"URL style=\"color:#356090\">flytech/python-codes-25k</a>\n\n\n<hr style=\"height:1px;border:none;color:#333;background-color:#136;\" />", "### <span style=\"color:#3560B0; font-weight: bold;\">Dataset Metrics</span>\n\n<span style=\"color:#3560B0;\">Token Count (via LlamaTokenizer)</span>\n\n- <span style=\"color:#4CAF50;\">Maximum</span>: 508\n- <span style=\"color:#2196F3;\">Average</span>: 158.06\n- <span style=\"color:#F44336;\">Total</span>: 13,993,984\n\n<span style=\"color:#006688;\">Word Count</span>: 1,890,810 \n<span style=\"color:#006688;\">Number of Examples</span>: 27,331", "### <b><span style=\"color:#3560B0; font-weight: bold;\">Usage</span></b>", "### <span style=\"color:#607D8B; font-weight: bold;\">License</span>\n\nThis dataset is under the 'llama2' license.\n\n<hr style=\"height:1px;border:none;color:#333;background-color:#136;\" />", "### CONTRIBUTIONS\n\n\n<a href=\"URL style=\"color:#356090\">flytech/python-codes-25k</a>", "### <span style=\"color:#266090; font-weight: bold;\">Tags</span>\n\n- 'code'\n- 'python'\n- 'instruct'\n- 'flytech'" ]
[ 79, 56, 98, 226, 178, 172, 32, 70, 33, 45 ]
[ "passage: TAGS\n#task_categories-question-answering #task_categories-text-generation #task_categories-text2text-generation #size_categories-10M<n<100M #language-English #license-llama2 #code #python #instruct #llama #flytech #region-us \n### <span style=\"color:#3560B0; font-weight: bold;\">Python Codes - 30k examples, Llama1&2 tokenized dataset</span>\n\n!License\n!Language\n!Size### <span style=\"color:#3560B0; font-weight: bold;\">Author</span>\n\n<span style=\"color:#266090;\">FlyTech</span>\n<span style=\"color:#3560B0\"></br>For general guide on how to create, quantize, merge or inference the model and more, visit:</span> \n<a href=\"URL target=\"_blank\">URL### <span style=\"color:#3560B0; font-weight: bold;\">Overview</span>\n\n<span style=\"color:#266090\">This dataset serves as a rich resource for various Natural Language Processing tasks such as:</span>\n\n- <span style=\"color:#E91E63;\">Question Answering</span>\n- <span style=\"color:#8BC34A;\">Text Generation</span>\n- <span style=\"color:#FFC107;\">Text-to-Text Generation</span>\n\n<b><span style=\"color:#266090\">It primarily focuses on instructional tasks in Python, tokenized specifically for the Llama architecture.\nThe dataset is a blend of GPT-4 generated content, custom codes, behavioral approaches and tasks extending beyond Python.</span></b>\n\n<hr style=\"height:1px;border:none;color:#333;background-color:#136;\" />" ]
a44a9a94b0d27817d2a2211a9ce6dee04df07ac1
# Dataset Card for Evaluation run of Charlie911/vicuna-7b-v1.5-lora-mixed-datasets-time-unit ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/Charlie911/vicuna-7b-v1.5-lora-mixed-datasets-time-unit - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [Charlie911/vicuna-7b-v1.5-lora-mixed-datasets-time-unit](https://huggingface.co/Charlie911/vicuna-7b-v1.5-lora-mixed-datasets-time-unit) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Charlie911__vicuna-7b-v1.5-lora-mixed-datasets-time-unit", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-28T09:09:00.701109](https://huggingface.co/datasets/open-llm-leaderboard/details_Charlie911__vicuna-7b-v1.5-lora-mixed-datasets-time-unit/blob/main/results_2023-10-28T09-09-00.701109.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval): ```python { "all": { "em": 0.004928691275167785, "em_stderr": 0.0007171872517059817, "f1": 0.06610213926174507, "f1_stderr": 0.001553905671666344, "acc": 0.4026417372707673, "acc_stderr": 0.009752392640502771 }, "harness|drop|3": { "em": 0.004928691275167785, "em_stderr": 0.0007171872517059817, "f1": 0.06610213926174507, "f1_stderr": 0.001553905671666344 }, "harness|gsm8k|5": { "acc": 0.0712661106899166, "acc_stderr": 0.007086462127954495 }, "harness|winogrande|5": { "acc": 0.734017363851618, "acc_stderr": 0.012418323153051046 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators?
[More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
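Beyond the single snippet in the card, a hedged sketch for discovering the available task configurations and pulling the most recent run of one of them (config and split names taken from this repo's metadata):

```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_Charlie911__vicuna-7b-v1.5-lora-mixed-datasets-time-unit"

# One configuration per evaluated task (64 in total for this model).
print(get_dataset_config_names(repo))

# "latest" always resolves to the most recent run of a given task.
drop_details = load_dataset(repo, "harness_drop_3", split="latest")
```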
open-llm-leaderboard/details_Charlie911__vicuna-7b-v1.5-lora-mixed-datasets-time-unit
[ "region:us" ]
2023-10-08T15:13:43+00:00
{"pretty_name": "Evaluation run of Charlie911/vicuna-7b-v1.5-lora-mixed-datasets-time-unit", "dataset_summary": "Dataset automatically created during the evaluation run of model [Charlie911/vicuna-7b-v1.5-lora-mixed-datasets-time-unit](https://huggingface.co/Charlie911/vicuna-7b-v1.5-lora-mixed-datasets-time-unit) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Charlie911__vicuna-7b-v1.5-lora-mixed-datasets-time-unit\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-28T09:09:00.701109](https://huggingface.co/datasets/open-llm-leaderboard/details_Charlie911__vicuna-7b-v1.5-lora-mixed-datasets-time-unit/blob/main/results_2023-10-28T09-09-00.701109.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.004928691275167785,\n \"em_stderr\": 0.0007171872517059817,\n \"f1\": 0.06610213926174507,\n \"f1_stderr\": 0.001553905671666344,\n \"acc\": 0.4026417372707673,\n \"acc_stderr\": 0.009752392640502771\n },\n \"harness|drop|3\": {\n \"em\": 0.004928691275167785,\n \"em_stderr\": 0.0007171872517059817,\n \"f1\": 0.06610213926174507,\n \"f1_stderr\": 0.001553905671666344\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0712661106899166,\n \"acc_stderr\": 0.007086462127954495\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.734017363851618,\n \"acc_stderr\": 0.012418323153051046\n }\n}\n```", "repo_url": "https://huggingface.co/Charlie911/vicuna-7b-v1.5-lora-mixed-datasets-time-unit", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|arc:challenge|25_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_28T09_09_00.701109", "path": ["**/details_harness|drop|3_2023-10-28T09-09-00.701109.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-28T09-09-00.701109.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_28T09_09_00.701109", "path": ["**/details_harness|gsm8k|5_2023-10-28T09-09-00.701109.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-28T09-09-00.701109.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": 
["**/details_harness|hellaswag|10_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T16-13-20.175189.parquet", 
"**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T16-13-20.175189.parquet", 
"**/details_harness|hendrycksTest-global_facts|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T16-13-20.175189.parquet", 
"**/details_harness|hendrycksTest-virology|5_2023-10-08T16-13-20.175189.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T16-13-20.175189.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T16-13-20.175189.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T16-13-20.175189.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_28T09_09_00.701109", "path": ["**/details_harness|winogrande|5_2023-10-28T09-09-00.701109.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-28T09-09-00.701109.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_08T16_13_20.175189", "path": ["results_2023-10-08T16-13-20.175189.parquet"]}, {"split": "2023_10_28T09_09_00.701109", "path": ["results_2023-10-28T09-09-00.701109.parquet"]}, {"split": "latest", "path": ["results_2023-10-28T09-09-00.701109.parquet"]}]}]}
2023-10-28T08:09:12+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Charlie911/vicuna-7b-v1.5-lora-mixed-datasets-time-unit ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model Charlie911/vicuna-7b-v1.5-lora-mixed-datasets-time-unit on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-28T09:09:00.701109 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
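The flattened card text above announces a loading snippet ("you can for instance do the following:") that was stripped during text extraction. A minimal sketch of what such a call looks like; the repo id is inferred from the leaderboard's details_<org>__<model> naming convention, while "harness_winogrande_5" and the "latest" split are taken from this record's own metadata:

```python
from datasets import load_dataset

# Load the per-example details of one evaluated task; "latest" always
# resolves to the most recent run per the config metadata above.
data = load_dataset(
    "open-llm-leaderboard/details_Charlie911__vicuna-7b-v1.5-lora-mixed-datasets-time-unit",
    "harness_winogrande_5",
    split="latest",
)
```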
[ "# Dataset Card for Evaluation run of Charlie911/vicuna-7b-v1.5-lora-mixed-datasets-time-unit", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Charlie911/vicuna-7b-v1.5-lora-mixed-datasets-time-unit on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-28T09:09:00.701109(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Charlie911/vicuna-7b-v1.5-lora-mixed-datasets-time-unit", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Charlie911/vicuna-7b-v1.5-lora-mixed-datasets-time-unit on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-28T09:09:00.701109(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 34, 31, 182, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Charlie911/vicuna-7b-v1.5-lora-mixed-datasets-time-unit## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model Charlie911/vicuna-7b-v1.5-lora-mixed-datasets-time-unit on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-28T09:09:00.701109(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
ae42d3f9fa8260a4eec1fc6da1475f26d709b4b9
# Dataset Card for "Mainspacehubdata" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
AryanNsc/Mainspacehubdata
[ "region:us" ]
2023-10-08T15:17:06+00:00
{"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 10911, "num_examples": 39}], "download_size": 8319, "dataset_size": 10911}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-08T15:42:43+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Mainspacehubdata" More Information needed
[ "# Dataset Card for \"Mainspacehubdata\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Mainspacehubdata\"\n\nMore Information needed" ]
[ 6, 14 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"Mainspacehubdata\"\n\nMore Information needed" ]
ad04db93fc7a416f12153356f90c724ea351999f
# Dataset Card for Evaluation run of LeoLM/leo-hessianai-7b

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/LeoLM/leo-hessianai-7b
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [LeoLM/leo-hessianai-7b](https://huggingface.co/LeoLM/leo-hessianai-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_LeoLM__leo-hessianai-7b",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-25T10:03:23.884304](https://huggingface.co/datasets/open-llm-leaderboard/details_LeoLM__leo-hessianai-7b/blob/main/results_2023-10-25T10-03-23.884304.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.0009437919463087249,
        "em_stderr": 0.0003144653119413205,
        "f1": 0.056075922818791854,
        "f1_stderr": 0.0013232326016856207,
        "acc": 0.38874610827245293,
        "acc_stderr": 0.009469282540407879
    },
    "harness|drop|3": {
        "em": 0.0009437919463087249,
        "em_stderr": 0.0003144653119413205,
        "f1": 0.056075922818791854,
        "f1_stderr": 0.0013232326016856207
    },
    "harness|gsm8k|5": {
        "acc": 0.056103108415466264,
        "acc_stderr": 0.006338668431321877
    },
    "harness|winogrande|5": {
        "acc": 0.7213891081294396,
        "acc_stderr": 0.01259989664949388
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
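As a complement to the card's own loading example, a hedged sketch for pulling the aggregated scores instead of per-example details: the "results" configuration and its "latest" split are declared in this record's metadata, though the exact row schema of the results parquet is not documented in the card.

```python
from datasets import load_dataset

# Load the aggregated-results configuration; "latest" points at the
# 2023-10-25 run whose metrics are quoted under "Latest results" above.
results = load_dataset(
    "open-llm-leaderboard/details_LeoLM__leo-hessianai-7b",
    "results",
    split="latest",
)
print(results[0])  # the row schema here is an assumption, not documented
```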
open-llm-leaderboard/details_LeoLM__leo-hessianai-7b
[ "region:us" ]
2023-10-08T16:16:38+00:00
{"pretty_name": "Evaluation run of LeoLM/leo-hessianai-7b", "dataset_summary": "Dataset automatically created during the evaluation run of model [LeoLM/leo-hessianai-7b](https://huggingface.co/LeoLM/leo-hessianai-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_LeoLM__leo-hessianai-7b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-25T10:03:23.884304](https://huggingface.co/datasets/open-llm-leaderboard/details_LeoLM__leo-hessianai-7b/blob/main/results_2023-10-25T10-03-23.884304.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0009437919463087249,\n \"em_stderr\": 0.0003144653119413205,\n \"f1\": 0.056075922818791854,\n \"f1_stderr\": 0.0013232326016856207,\n \"acc\": 0.38874610827245293,\n \"acc_stderr\": 0.009469282540407879\n },\n \"harness|drop|3\": {\n \"em\": 0.0009437919463087249,\n \"em_stderr\": 0.0003144653119413205,\n \"f1\": 0.056075922818791854,\n \"f1_stderr\": 0.0013232326016856207\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.056103108415466264,\n \"acc_stderr\": 0.006338668431321877\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7213891081294396,\n \"acc_stderr\": 0.01259989664949388\n }\n}\n```", "repo_url": "https://huggingface.co/LeoLM/leo-hessianai-7b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|arc:challenge|25_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_25T10_03_23.884304", "path": ["**/details_harness|drop|3_2023-10-25T10-03-23.884304.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-25T10-03-23.884304.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_25T10_03_23.884304", "path": ["**/details_harness|gsm8k|5_2023-10-25T10-03-23.884304.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-25T10-03-23.884304.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hellaswag|10_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": 
"2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T17-16-14.181420.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T17-16-14.181420.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T17-16-14.181420.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T17-16-14.181420.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T17-16-14.181420.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_25T10_03_23.884304", "path": ["**/details_harness|winogrande|5_2023-10-25T10-03-23.884304.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-25T10-03-23.884304.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_08T17_16_14.181420", "path": ["results_2023-10-08T17-16-14.181420.parquet"]}, {"split": "2023_10_25T10_03_23.884304", "path": ["results_2023-10-25T10-03-23.884304.parquet"]}, {"split": "latest", "path": ["results_2023-10-25T10-03-23.884304.parquet"]}]}]}
2023-10-25T09:03:36+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of LeoLM/leo-hessianai-7b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model LeoLM/leo-hessianai-7b on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-25T10:03:23.884304 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
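In this flattened copy of the card the loading snippet was stripped during text extraction; for readability, here is the same example reproduced verbatim from the full card earlier in this record:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_LeoLM__leo-hessianai-7b",
	"harness_winogrande_5",
	split="train")
```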
[ "# Dataset Card for Evaluation run of LeoLM/leo-hessianai-7b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model LeoLM/leo-hessianai-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-25T10:03:23.884304(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of LeoLM/leo-hessianai-7b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model LeoLM/leo-hessianai-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-25T10:03:23.884304(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 19, 31, 167, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of LeoLM/leo-hessianai-7b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model LeoLM/leo-hessianai-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-25T10:03:23.884304(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
c3bcb8ec6293dd72e1a14265c162a90a86060e99
# Scoring popular datasets with ["Self-Alignment with Instruction Backtranslation"](https://arxiv.org/abs/2308.06259) prompt ## Scoring Models used - gpt-3.5-turbo-16k - gpt-3.5-turbo-1106 ## All datasets have 2 additional columns - score - Response from the model including CoT (if provided) - extracted_score - Extracted score from the score column as int ## Datasets Scored by Prompt #### Original Score Prompt from paper - [airoboros-2.1](https://huggingface.co/datasets/jondurbin/airoboros-2.1) - [alpaca-gpt4](https://huggingface.co/datasets/vicgalle/alpaca-gpt4) - [dolphin](https://huggingface.co/datasets/cognitivecomputations/dolphin) - Only GPT-4 responses (flan1m-alpaca-uncensored-deduped.jsonl) - [open-platypus](https://huggingface.co/datasets/garage-bAInd/Open-Platypus) - [orca_mini_v1](https://huggingface.co/datasets/pankajmathur/orca_mini_v1_dataset) - [SlimOrca-Dedup](https://huggingface.co/datasets/Open-Orca/SlimOrca-Dedup) - [Synthia-1.3](https://huggingface.co/datasets/migtissera/Synthia-v1.3) - [wizard_alpaca_dolly_orca](https://huggingface.co/datasets/nRuaif/wizard_alpaca_dolly_orca) #### Conversation Score Prompt (Modified) - [Capybara](https://huggingface.co/datasets/LDJnr/Capybara) - [ultrachat](https://huggingface.co/datasets/HuggingFaceH4/ultrachat_200k) ## Score Breakdown | Dataset | 5 | 4 | 3 | 2 | 1 | 0 | |-------------------------|----------:|----------:|----------:|----------:|----------:|----------:| | dolphin | 80.232373 | 10.841314 | 2.217159 | 3.075088 | 3.63371 | 0.000356 | | open-platypus | 76.390115 | 10.779909 | 3.093156 | 3.558533 | 6.178288 | 0 | | Capybara | 73.57241 | 12.851431 | 3.005123 | 4.117206 | 6.435087 | 0.018743 | | airoboros-2.1 | 69.869994 | 26.695312 | 1.322096 | 1.076957 | 1.035641 | 0 | | alpaca-gpt4 | 65.421891 | 31.797554 | 1.301823 | 0.824937 | 0.653796 | 0 | | wizard_alpaca_dolly_orca| 63.898674 | 32.68317 | 1.752752 | 0.894614 | 0.769829 | 0.00096 | | ultrachat | 50.213948 | 40.684169 | 5.741387 | 2.880979 | 0.478934 | 0.000582 | | orca_mini_v1 | 46.351518 | 49.313846 | 1.568606 | 1.898745 | 0.867284 | 0 | | Synthia-v1.3 | 39.262214 | 52.335033 | 2.627859 | 3.38096 | 2.392252 | 0.001683 | | SlimOrca-Dedup | 29.987262 | 55.132314 | 7.122872 | 2.998424 | 4.759127 | 0 | ## Prompts #### Original Score Prompt from paper ``` Below is an instruction from an user and a candidate answer. Evaluate whether or not the answer is a good example of how AI Assistant should respond to the user’s instruction. Please assign a score using the following 5-point scale: 1: It means the answer is incomplete, vague, off-topic, controversial, or not exactly what the user asked for. For example, some content seems missing, numbered list does not start from the beginning, the opening sentence repeats user’s question. Or the response is from another person’s perspective with their personal experience (e.g. taken from blog posts), or looks like an answer from a forum. Or it contains promotional text, navigation text, or other irrelevant information. 2: It means the answer addresses most of the asks from the user. It does not directly address the user’s question. For example, it only provides a high-level methodology instead of the exact solution to user’s question. 3: It means the answer is helpful but not written by an AI Assistant. It addresses all the basic asks from the user. It is complete and self contained with the drawback that the response is not written from an AI assistant’s perspective, but from other people’s perspective. 
The content looks like an excerpt from a blog post, web page, or web search results. For example, it contains personal experience or opinion, mentions comments section, or share on social media, etc. 4: It means the answer is written from an AI assistant’s perspective with a clear focus of addressing the instruction. It provide a complete, clear, and comprehensive response to user’s question or instruction without missing or irrelevant information. It is well organized, self-contained, and written in a helpful tone. It has minor room for improvement, e.g. more concise and focused. 5: It means it is a perfect answer from an AI Assistant. It has a clear focus on being a helpful AI Assistant, where the response looks like intentionally written to address the user’s question or instruction without any irrelevant sentences. The answer provides high quality content, demonstrating expert knowledge in the area, is very well written, logical, easy-to-follow, engaging and insightful. Please first provide a chain of thought brief reasoning you used to derive the rating score, and then write "Score: <rating>" in the last line. ``` #### Conversation Score Prompt (Modified) ``` Below are a series of user instructions and corresponding candidate answers in a multi-turn conversation. Evaluate whether or not each answer is a good example of how the AI Assistant should respond to the user’s instructions in the context of an ongoing dialogue. Please assign a score using the following 5-point scale: 1: The answer is incomplete, vague, off-topic, controversial, or fails to build upon previous turns in the conversation. It might ignore context provided earlier, repeat information unnecessarily, or deviate from the conversational flow. Examples include missing content that should logically follow from earlier turns, responses that reset the conversation without acknowledging past interactions, or introducing irrelevant or promotional information. 2: The answer addresses the user's concerns but misses key elements of context or nuance from previous turns. It might provide a generally correct direction but fails to leverage the multi-turn nature of the conversation, such as not recalling information provided earlier or not sufficiently building upon it. 3: The answer is helpful and acknowledges the multi-turn context but reads more like a series of standalone responses rather than a cohesive conversation. It covers the basic asks from the user across multiple turns but might lack a seamless integration of conversation history or a sense of ongoing dialogue. 4: The answer is well-tailored to a multi-turn conversation, showing awareness of previous interactions and building upon them effectively. It is clear, comprehensive, and maintains a conversational flow, with only minor room for improvement, such as refining the integration of past and current turns or enhancing conversational fluidity. 5: The answer exemplifies perfect handling of a multi-turn conversation by an AI Assistant. It seamlessly integrates information from previous turns, providing high-quality, context-aware responses that demonstrate expert knowledge and maintain a logical, engaging, and insightful dialogue flow throughout. Please first provide a brief chain of thought reasoning you used to derive the rating score, considering how well the AI Assistant maintains and builds upon the conversational context. Then write "Score: <rating>" in the last line. ```
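The card does not show how `extracted_score` is derived from the raw `score` responses, so here is an illustrative sketch rather than the actual pipeline code (the regex, the config name, and the split below are assumptions). Both prompts instruct the model to end with "Score: <rating>" on the last line, which makes extraction a simple pattern match:

```python
import re
from collections import Counter

from datasets import load_dataset


def extract_score(score_text: str):
    """Pull the integer rating from a 'Score: <rating>' line in a CoT response."""
    matches = re.findall(r"Score:\s*([0-5])", score_text)
    # Take the last match, since the rating is written on the final line.
    return int(matches[-1]) if matches else None


# Example: recompute the score distribution for one of the scored datasets.
# The config name "alpaca-gpt4" and split "train" are hypothetical here.
ds = load_dataset("0-hero/prompt-perfect", "alpaca-gpt4", split="train")
print(Counter(ds["extracted_score"]))
```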
0-hero/prompt-perfect
[ "size_categories:1M<n<10M", "language:en", "arxiv:2308.06259", "region:us" ]
2023-10-08T16:27:15+00:00
{"language": ["en"], "size_categories": ["1M<n<10M"]}
2024-02-04T10:55:05+00:00
[ "2308.06259" ]
[ "en" ]
TAGS #size_categories-1M<n<10M #language-English #arxiv-2308.06259 #region-us
Scoring popular datasets with "Self-Alignment with Instruction Backtranslation" prompt ====================================================================================== Scoring Models used ------------------- * gpt-3.5-turbo-16k * gpt-3.5-turbo-1106 All datasets have 2 additional columns -------------------------------------- * score - Response from the model including CoT (if provided) * extracted\_score - Extracted score from the score column as int Datasets Scored by Prompt ------------------------- #### Original Score Prompt from paper * airoboros-2.1 * alpaca-gpt4 * dolphin - Only GPT-4 responses (URL) * open-platypus * orca\_mini\_v1 * SlimOrca-Dedup * Synthia-1.3 * wizard\_alpaca\_dolly\_orca #### Conversation Score Prompt (Modified) * Capybara * ultrachat Score Breakdown --------------- Prompts ------- #### Original Score Prompt from paper #### Conversation Score Prompt (Modified)
[ "#### Original Score Prompt from paper\n\n\n* airoboros-2.1\n* alpaca-gpt4\n* dolphin - Only GPT-4 responses (URL)\n* open-platypus\n* orca\\_mini\\_v1\n* SlimOrca-Dedup\n* Synthia-1.3\n* wizard\\_alpaca\\_dolly\\_orca", "#### Conversation Score Prompt (Modified)\n\n\n* Capybara\n* ultrachat\n\n\nScore Breakdown\n---------------\n\n\n\nPrompts\n-------", "#### Original Score Prompt from paper", "#### Conversation Score Prompt (Modified)" ]
[ "TAGS\n#size_categories-1M<n<10M #language-English #arxiv-2308.06259 #region-us \n", "#### Original Score Prompt from paper\n\n\n* airoboros-2.1\n* alpaca-gpt4\n* dolphin - Only GPT-4 responses (URL)\n* open-platypus\n* orca\\_mini\\_v1\n* SlimOrca-Dedup\n* Synthia-1.3\n* wizard\\_alpaca\\_dolly\\_orca", "#### Conversation Score Prompt (Modified)\n\n\n* Capybara\n* ultrachat\n\n\nScore Breakdown\n---------------\n\n\n\nPrompts\n-------", "#### Original Score Prompt from paper", "#### Conversation Score Prompt (Modified)" ]
[ 31, 83, 31, 9, 13 ]
[ "passage: TAGS\n#size_categories-1M<n<10M #language-English #arxiv-2308.06259 #region-us \n#### Original Score Prompt from paper\n\n\n* airoboros-2.1\n* alpaca-gpt4\n* dolphin - Only GPT-4 responses (URL)\n* open-platypus\n* orca\\_mini\\_v1\n* SlimOrca-Dedup\n* Synthia-1.3\n* wizard\\_alpaca\\_dolly\\_orca#### Conversation Score Prompt (Modified)\n\n\n* Capybara\n* ultrachat\n\n\nScore Breakdown\n---------------\n\n\n\nPrompts\n-------#### Original Score Prompt from paper#### Conversation Score Prompt (Modified)" ]
ddda5db57612ea046c8e71d28eb4cf17148d1ae2
# Dataset Card for Evaluation run of pankajmathur/orca_mini_v3_13b

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/pankajmathur/orca_mini_v3_13b
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [pankajmathur/orca_mini_v3_13b](https://huggingface.co/pankajmathur/orca_mini_v3_13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_pankajmathur__orca_mini_v3_13b",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-28T16:43:24.612769](https://huggingface.co/datasets/open-llm-leaderboard/details_pankajmathur__orca_mini_v3_13b/blob/main/results_2023-10-28T16-43-24.612769.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.15383808724832215,
        "em_stderr": 0.0036948628598682874,
        "f1": 0.22225880872483197,
        "f1_stderr": 0.0037670501187578413,
        "acc": 0.44797935342421163,
        "acc_stderr": 0.010609253699619367
    },
    "harness|drop|3": {
        "em": 0.15383808724832215,
        "em_stderr": 0.0036948628598682874,
        "f1": 0.22225880872483197,
        "f1_stderr": 0.0037670501187578413
    },
    "harness|gsm8k|5": {
        "acc": 0.13115996967399546,
        "acc_stderr": 0.00929849923558785
    },
    "harness|winogrande|5": {
        "acc": 0.7647987371744278,
        "acc_stderr": 0.011920008163650884
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
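The same loading pattern also works for the aggregated numbers shown above: this repo's config list defines a "results" configuration whose "latest" split points at the most recent results parquet. A minimal sketch (the exact row structure of that parquet is an assumption):

```python
from datasets import load_dataset

# "results" stores the aggregated metrics of each run; for this repo the
# "latest" split resolves to results_2023-10-28T16-43-24.612769.parquet.
results = load_dataset(
    "open-llm-leaderboard/details_pankajmathur__orca_mini_v3_13b",
    "results",
    split="latest",
)
print(results[0])  # aggregated em/f1/acc fields, nested as in the JSON above
```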
open-llm-leaderboard/details_pankajmathur__orca_mini_v3_13b
[ "region:us" ]
2023-10-08T16:27:39+00:00
{"pretty_name": "Evaluation run of pankajmathur/orca_mini_v3_13b", "dataset_summary": "Dataset automatically created during the evaluation run of model [pankajmathur/orca_mini_v3_13b](https://huggingface.co/pankajmathur/orca_mini_v3_13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_pankajmathur__orca_mini_v3_13b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-28T16:43:24.612769](https://huggingface.co/datasets/open-llm-leaderboard/details_pankajmathur__orca_mini_v3_13b/blob/main/results_2023-10-28T16-43-24.612769.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.15383808724832215,\n \"em_stderr\": 0.0036948628598682874,\n \"f1\": 0.22225880872483197,\n \"f1_stderr\": 0.0037670501187578413,\n \"acc\": 0.44797935342421163,\n \"acc_stderr\": 0.010609253699619367\n },\n \"harness|drop|3\": {\n \"em\": 0.15383808724832215,\n \"em_stderr\": 0.0036948628598682874,\n \"f1\": 0.22225880872483197,\n \"f1_stderr\": 0.0037670501187578413\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.13115996967399546,\n \"acc_stderr\": 0.00929849923558785\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7647987371744278,\n \"acc_stderr\": 0.011920008163650884\n }\n}\n```", "repo_url": "https://huggingface.co/pankajmathur/orca_mini_v3_13b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|arc:challenge|25_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_28T16_43_24.612769", "path": ["**/details_harness|drop|3_2023-10-28T16-43-24.612769.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-28T16-43-24.612769.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_28T16_43_24.612769", "path": ["**/details_harness|gsm8k|5_2023-10-28T16-43-24.612769.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-28T16-43-24.612769.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hellaswag|10_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T17-27-15.323068.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T17-27-15.323068.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T17-27-15.323068.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T17-27-15.323068.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T17-27-15.323068.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_28T16_43_24.612769", "path": ["**/details_harness|winogrande|5_2023-10-28T16-43-24.612769.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-28T16-43-24.612769.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_08T17_27_15.323068", "path": ["results_2023-10-08T17-27-15.323068.parquet"]}, {"split": "2023_10_28T16_43_24.612769", "path": ["results_2023-10-28T16-43-24.612769.parquet"]}, {"split": "latest", "path": ["results_2023-10-28T16-43-24.612769.parquet"]}]}]}
2023-10-28T15:43:36+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of pankajmathur/orca_mini_v3_13b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model pankajmathur/orca_mini_v3_13b on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-28T16:43:24.612769 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of pankajmathur/orca_mini_v3_13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model pankajmathur/orca_mini_v3_13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-28T16:43:24.612769(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of pankajmathur/orca_mini_v3_13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model pankajmathur/orca_mini_v3_13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-28T16:43:24.612769(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 24, 31, 172, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of pankajmathur/orca_mini_v3_13b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model pankajmathur/orca_mini_v3_13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-28T16:43:24.612769(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
0939061fac83aa433411b9b433d2b046b074761f
# An Open and Large-Scale Dataset for Multi-Modal Climate Change-aware Crop Yield Predictions

![Motivation](images/dataset-motivation.png)

The CropNet dataset is an open, large-scale, and deep learning-ready dataset, specifically targeting climate change-aware crop yield predictions for the contiguous United States (U.S.) continent at the county level. It is composed of three modalities of data, i.e., Sentinel-2 Imagery, WRF-HRRR Computed Dataset, and USDA Crop Dataset, aligned in both the spatial and temporal domains, for over 2200 U.S. counties spanning 6 years (2017-2022). It is expected to facilitate researchers in developing deep learning models for the timely and precise prediction of crop yields at the county level, by accounting for the effects of both short-term growing season weather variations and long-term climate change on crop yields. Although our initial goal in crafting the CropNet dataset was precise crop yield prediction, we believe its future applicability is broad and can benefit the deep learning, agriculture, and meteorology communities, for exploring more interesting, critical, and climate change-related applications, by using one or more modalities of data.

## Contributions

#### The `CropNet` dataset

- The first *terabyte-sized*, publicly available, and multi-modal dataset for climate change-aware crop yield predictions
- The `CropNet` dataset is available at [Google Drive](https://drive.google.com/drive/folders/1Js98GAxf1LeAUTxP1JMZZIrKvyJStDgz)

#### The `CropNet` package

- A *deep learning-ready* Python package for facilitating researchers in downloading the CropNet data on the fly over the time and region of interest, and developing deep neural networks (DNNs) for climate change-aware crop yield predictions
- The `CropNet` package is available at [Python Package Index (PyPI)](https://pypi.org/project/cropnet/)

## Tutorials

The tutorials for the CropNet dataset are available at Google Colab, with their links listed below

- [Sentinel-2 Imagery Tutorial](https://colab.research.google.com/drive/1Tj69JdhO7aX8ks-4UWYvHrFm9GB1PNCd?usp=sharing)
- [WRF-HRRR Computed Dataset Tutorial](https://colab.research.google.com/drive/14l-JSNHtelawNu3kVG_ukTd2WUJpaZEc?usp=sharing)
- [USDA Crop Dataset Tutorial](https://colab.research.google.com/drive/1U-vFoRyLSb2l2Q67LeGbkUKTeRaHDkkK?usp=sharing)

## The CropNet Dataset

Our CropNet dataset is composed of three modalities of data, i.e., Sentinel-2 Imagery, WRF-HRRR Computed Dataset, and USDA Crop Dataset, spanning from 2017 to 2022 (i.e., 6 years) across 2291 U.S. counties, with its geographic distribution illustrated below. We also include the number of counties corresponding to each crop type in the USDA Crop Dataset (see the rightmost bar chart in the figure) since crop planting is highly geography-dependent.

![Geographic Distribution](images/dataset-geo-overview-violet-pastel.png)

### Sentinel-2 Imagery

The Sentinel-2 Imagery, obtained from the Sentinel-2 mission, provides high-resolution satellite images for monitoring crop growth on the ground. It contains two types of 224x224 RGB satellite images, agriculture imagery (AG) and normalized difference vegetation index (NDVI), both with a spatial resolution of 9x9 km, and a revisit frequency of 14 days. Examples of AG and NDVI images are depicted as follows.
- **Agriculture Imagery (AG)**

![AG](images/dataset-Sentinel2-AG.png)

- **Normalized Difference Vegetation Index (NDVI)**

![NDVI](images/dataset-Sentinel2-NDVI.png)

### WRF-HRRR Computed Dataset

The WRF-HRRR Computed Dataset, sourced from the WRF-HRRR model, contains daily and monthly meteorological parameters, with the former and the latter designed for capturing the direct effects of short-term growing season weather variations on crop growth, and for learning the indirect impacts of long-term climate change on crop yields, respectively. It contains 9 meteorological parameters gridded at 9 km in a one-day (and one-month) interval. The figures show the temperature in the spring, the summer, the fall, and the winter, respectively.

![HRRR Temperature](images/dataset-HRRR-temperature.png)

### USDA Crop Dataset

The USDA Crop Dataset, collected from the USDA Quick Statistics website, offers valuable information, such as production, yield, etc., for crops grown in each available county. It offers crop information for four types of crops, i.e., corn, cotton, soybeans, and winter wheat, on a county-level basis, with a temporal resolution of one year. The figure illustrates the 2022 Corn Yield across the United States.

![USDA Corn Yield](images/dataset-corn-yield.png)

### The CropNet Package

Beyond the contribution of our CropNet dataset, we also release the CropNet package in the Python Package Index (PyPI) for facilitating researchers in downloading the CropNet data based on the time and region of interest, and flexibly building their deep learning models for accurate crop yield predictions. In particular, the CropNet package includes three types of APIs, listed as follows:

- **DataDownloader**: This API allows users to download the CropNet data over the time/region of interest on the fly.
- **DataRetriever**: With this API, users can conveniently obtain the CropNet data stored in the local machine (e.g., if you have downloaded our curated CropNet from Google Drive) over the time/region of interest.
- **DataLoader**: This API is designed to facilitate researchers in developing their DNNs for accurate crop yield predictions. Specifically, the code in this API (1) combines all three modalities of data to create $(\mathbf{x}, \mathbf{y_{s}}, \mathbf{y_{l}}, \mathbf{z})$ tuples, with $\mathbf{x}, \mathbf{y_{s}}, \mathbf{y_{l}}, \text{and}~ \mathbf{z}$, respectively representing satellite images, short-term daily weather parameters, long-term monthly meteorological parameters, and ground-truth crop yield (or production) information, and then (2) exposes those tuples via a `Dataset` object after appropriate data pre-processing techniques.

### Installation

Researchers and practitioners can install the latest version of CropNet with the following commands:

```python
# Create and activate a conda environment
conda create -n cropnet_api python=3.10
conda activate cropnet_api

# Install the latest version of CropNet
pip install cropnet

# Solve the ecCodes library dependency issue
pip install ecmwflibs
```

### CropNet API Examples

- **Example 1: A DataDownloader Example for Downloading the Up-to-date CropNet Data**

Given the time and region (i.e., the FIPS codes for two U.S. counties) of interest, the following code presents how to utilize the **DataDownloader** to download the up-to-date CropNet data:

```python
from cropnet.data_downloader import DataDownloader

# Use the "target_dir" to specify where the data should be downloaded to
downloader = DataDownloader(target_dir="./data")

# Download 2022 USDA Soybean data
# Note that most of the 2023 USDA data are not yet available
downloader.download_USDA("Soybean", fips_codes=["10003", "22007"], years=["2022"])

# Download the 2023 (the 1st and 2nd quarters) Sentinel-2 Imagery
downloader.download_Sentinel2(fips_codes=["10003", "22007"], years=["2023"], image_type="AG")
downloader.download_Sentinel2(fips_codes=["10003", "22007"], years=["2023"], image_type="NDVI")

# Download the 2023 (January to July) WRF-HRRR data
downloader.download_HRRR(fips_codes=["10003", "22007"], years=["2023"])
```

- **Example 2: A DataRetriever Example for Obtaining Our Curated CropNet Data**

Given the time and region of interest, the following code shows how to use the **DataRetriever** to obtain the CropNet data stored in the local machine in a user-friendly format:

```python
# NOTE: import path assumed to mirror the downloader module
from cropnet.data_retriever import DataRetriever

# Use the "base_dir" to specify where the CropNet data is stored
retriever = DataRetriever(base_dir="/mnt/data/CropNet")

# Retrieve the 2022 USDA Soybean data
usda_data = retriever.retrieve_USDA(crop_type="Soybean", fips_codes=["10003", "22007"], years=["2022"])

# Retrieve the 2022 Sentinel-2 Imagery data
sentinel2_data = retriever.retrieve_Sentinel2(fips_codes=["10003", "22007"], years=["2022"], image_type="AG")
sentinel2_data = retriever.retrieve_Sentinel2(fips_codes=["10003", "22007"], years=["2022"], image_type="NDVI")

# Retrieve the 2022 WRF-HRRR data
hrrr_data = retriever.retrieve_HRRR(fips_codes=["10003", "22007"], years=["2022"])
```

- **Example 3: A PyTorch Example for Using the DataLoader API for Training DNNs**

The following code presents a PyTorch example of training a deep learning model (i.e., MMST-ViT) for climate change-aware crop yield predictions, by utilizing the DataLoader APIs:

```python
import torch
from torch.utils.data import DataLoader

from models_mmst_vit import MMST_ViT
from cropnet.dataset.hrrr_computed_dataset import HRRRComputedDataset
from cropnet.dataset.sentinel2_imagery import Sentinel2Imagery
from cropnet.dataset.usda_crop_dataset import USDACropDataset

# The base directory for the CropNet dataset
base_dir = "/mnt/data/CropNet"

# The JSON configuration file
config_file = "data/soybeans_train.json"

# The dataloaders for each modality of data
sentinel2_loader = DataLoader(Sentinel2Imagery(base_dir, config_file), batch_size=1)
hrrr_loader = DataLoader(HRRRComputedDataset(base_dir, config_file), batch_size=1)
usda_loader = DataLoader(USDACropDataset(base_dir, config_file), batch_size=1)

# The model, the optimizer, and the loss function
model = MMST_ViT()
optimizer = torch.optim.AdamW(model.parameters(), lr=1e-3, betas=(0.9, 0.999))
criterion = torch.nn.MSELoss()

# Training the model for one epoch
for s, h, u in zip(sentinel2_loader, hrrr_loader, usda_loader):
    # x: satellite images
    # ys (or yl): short-term daily (or long-term monthly) weather parameters
    # z: ground-truth crop yield (or production) information
    x, ys, yl, z = s[0], h[0], h[1], u[0]

    optimizer.zero_grad()
    z_hat = model(x, ys, yl)
    loss = criterion(z, z_hat)

    loss.backward()
    optimizer.step()
```

## License

CropNet has a [Creative Commons Attribution-NonCommercial 4.0 International (CC BY-NC 4.0)](https://creativecommons.org/licenses/by-nc/4.0/) license.
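As an editorial aside on the NDVI imagery described above: the card ships pre-rendered NDVI images but does not spell out the index itself. Below is a minimal, illustrative sketch of the standard NDVI formula, (NIR - Red) / (NIR + Red), computed over hypothetical reflectance arrays; this is purely explanatory and not part of the CropNet package.

```python
import numpy as np

def ndvi(nir: np.ndarray, red: np.ndarray, eps: float = 1e-8) -> np.ndarray:
    """Normalized Difference Vegetation Index: (NIR - Red) / (NIR + Red).

    Values lie in [-1, 1]; dense, healthy vegetation scores close to 1.
    `eps` guards against division by zero over dark pixels.
    """
    nir = nir.astype(np.float32)
    red = red.astype(np.float32)
    return (nir - red) / (nir + red + eps)

# Hypothetical 224x224 near-infrared and red reflectance bands
nir_band = np.random.rand(224, 224).astype(np.float32)
red_band = np.random.rand(224, 224).astype(np.float32)
print(ndvi(nir_band, red_band).shape)  # (224, 224)
```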
CropNet/CropNet
[ "size_categories:n>1T", "language:en", "license:cc-by-4.0", "climate", "region:us" ]
2023-10-08T16:59:29+00:00
{"language": ["en"], "license": "cc-by-4.0", "size_categories": ["n>1T"], "tags": ["climate"]}
2024-02-14T01:41:14+00:00
[]
[ "en" ]
TAGS #size_categories-n>1T #language-English #license-cc-by-4.0 #climate #region-us
# An Open and Large-Scale Dataset for Multi-Modal Climate Change-aware Crop Yield Predictions

!Motivation

The CropNet dataset is an open, large-scale, and deep learning-ready dataset, specifically targeting climate change-aware crop yield predictions for the contiguous United States (U.S.) continent at the county level. It is composed of three modalities of data, i.e., Sentinel-2 Imagery, WRF-HRRR Computed Dataset, and USDA Crop Dataset, aligned in both the spatial and temporal domains, for over 2200 U.S. counties spanning 6 years (2017-2022). It is expected to facilitate researchers in developing deep learning models for the timely and precise prediction of crop yields at the county level, by accounting for the effects of both short-term growing season weather variations and long-term climate change on crop yields. Although our initial goal in crafting the CropNet dataset was precise crop yield prediction, we believe its future applicability is broad and can benefit the deep learning, agriculture, and meteorology communities, for exploring more interesting, critical, and climate change-related applications, by using one or more modalities of data.

## Contributions

#### The 'CropNet' dataset

- The first *terabyte-sized*, publicly available, and multi-modal dataset for climate change-aware crop yield predictions
- The 'CropNet' dataset is available at Google Drive

#### The 'CropNet' package

- A *deep learning-ready* Python package for facilitating researchers in downloading the CropNet data on the fly over the time and region of interest, and developing deep neural networks (DNNs) for climate change-aware crop yield predictions
- The 'CropNet' package is available at Python Package Index (PyPI)

## Tutorials

The tutorials for the CropNet dataset are available at Google Colab, with their links listed below

- Sentinel-2 Imagery Tutorial
- WRF-HRRR Computed Dataset Tutorial
- USDA Crop Dataset Tutorial

## The CropNet Dataset

Our CropNet dataset is composed of three modalities of data, i.e., Sentinel-2 Imagery, WRF-HRRR Computed Dataset, and USDA Crop Dataset, spanning from 2017 to 2022 (i.e., 6 years) across 2291 U.S. counties, with its geographic distribution illustrated below. We also include the number of counties corresponding to each crop type in the USDA Crop Dataset (see the rightmost bar chart in the figure) since crop planting is highly geography-dependent.

!Geographic Distribution

### Sentinel-2 Imagery

The Sentinel-2 Imagery, obtained from the Sentinel-2 mission, provides high-resolution satellite images for monitoring crop growth on the ground. It contains two types of 224x224 RGB satellite images, agriculture imagery (AG) and normalized difference vegetation index (NDVI), both with a spatial resolution of 9x9 km, and a revisit frequency of 14 days. Examples of AG and NDVI images are depicted as follows.

- Agriculture Imagery (AG)

!AG

- Normalized Difference Vegetation Index (NDVI)

!NDVI

### WRF-HRRR Computed Dataset

The WRF-HRRR Computed Dataset, sourced from the WRF-HRRR model, contains daily and monthly meteorological parameters, with the former and the latter designed for capturing the direct effects of short-term growing season weather variations on crop growth, and for learning the indirect impacts of long-term climate change on crop yields, respectively. It contains 9 meteorological parameters gridded at 9 km in a one-day (and one-month) interval. The figures show the temperature in the spring, the summer, the fall, and the winter, respectively.

!HRRR Temperature

### USDA Crop Dataset

The USDA Crop Dataset, collected from the USDA Quick Statistics website, offers valuable information, such as production, yield, etc., for crops grown in each available county. It offers crop information for four types of crops, i.e., corn, cotton, soybeans, and winter wheat, on a county-level basis, with a temporal resolution of one year. The figure illustrates the 2022 Corn Yield across the United States.

!USDA Corn Yield

### The CropNet Package

Beyond the contribution of our CropNet dataset, we also release the CropNet package in the Python Package Index (PyPI) for facilitating researchers in downloading the CropNet data based on the time and region of interest, and flexibly building their deep learning models for accurate crop yield predictions. In particular, the CropNet package includes three types of APIs, listed as follows:

- DataDownloader: This API allows users to download the CropNet data over the time/region of interest on the fly.

- DataRetriever: With this API, users can conveniently obtain the CropNet data stored in the local machine (e.g., if you have downloaded our curated CropNet from Google Drive) over the time/region of interest.

- DataLoader: This API is designed to facilitate researchers in developing their DNNs for accurate crop yield predictions. Specifically, the code in this API (1) combines all three modalities of data to create $(\mathbf{x}, \mathbf{y_{s}}, \mathbf{y_{l}}, \mathbf{z})$ tuples, with $\mathbf{x}, \mathbf{y_{s}}, \mathbf{y_{l}}, \text{and}~ \mathbf{z}$, respectively representing satellite images, short-term daily weather parameters, long-term monthly meteorological parameters, and ground-truth crop yield (or production) information, and then (2) exposes those tuples via a 'Dataset' object after appropriate data pre-processing techniques.

### Installation

Researchers and practitioners can install the latest version of CropNet with the following commands:

### CropNet API Examples

- Example 1: A DataDownloader Example for Downloading the Up-to-date CropNet Data

  Given the time and region (i.e., the FIPS codes for two U.S. counties) of interest, the following code presents how to utilize the DataDownloader to download the up-to-date CropNet data:

- Example 2: A DataRetriever Example for Obtaining Our Curated CropNet Data

  Given the time and region of interest, the following code shows how to use the DataRetriever to obtain the CropNet data stored in the local machine in a user-friendly format:

- Example 3: A PyTorch Example for Using the DataLoader API for Training DNNs

  The following code presents a PyTorch example of training a deep learning model (i.e., MMST-ViT) for climate change-aware crop yield predictions, by utilizing the DataLoader APIs:

## License

CropNet has a Creative Commons Attribution-NonCommercial 4.0 International (CC BY-NC 4.0) license.
[ "# An Open and Large-Scale Dataset for Multi-Modal Climate Change-aware Crop Yield Predictions\n\n!Motivation\n\n\n\nThe CropNet dataset is an open, large-scale, and deep learning-ready dataset, specifically targeting climate change-aware crop yield predictions for the contiguous United States (U.S.) continent at the county level. It is composed of three modalities of data, i.e., Sentinel-2 Imagery, WRF-HRRR Computed Dataset, and USDA Crop Dataset, aligned in both the spatial and temporal domains, for over 2200 U.S. counties spanning 6 years (2017-2022). It is expected to facilitate researchers in developing deep learning models for timely and precisely predicting crop yields at the county level, by accounting for the effects of both short-term growing season weather variations and long-term climate change on crop yields. Although our initial goal of crafting the CropNet dataset is for precise crop yield prediction, we believe its future applicability is broad and can benefit the deep learning, agriculture, and meteorology communities, for exploring more interesting, critical, and climate change-related applications, by using one or more modalities of data.", "## Contributions", "#### The 'CropNet' dataset\n\n- The first *terabyte-sized*, publicly available, and multi-modal dataset for climate change-aware crop yield predictions\n- The 'CropNet' dataset is available at Google Drive", "#### The 'CropNet' package \n\n- A *deep learning-ready* Python package for facilitating researchers in downloading the CropNet data on the fly over the time and region of interest, and developing deep neural networks (DNNs) for climate change-aware crop yield predictions\n- The 'CropNet' package is available at Python Package Index (PyPI)", "## Tutorials\n\nThe tutorials for the CropNet dataset are available at Google Colab, with their links listed below\n\n- Sentinel-2 Imagery Tutorial\n- WRF-HRRR Computed Dataset Tutorial\n\n- USDA Crop Dataset Tutorial", "## The CropNet Dataset\n\n0ur CropNet dataset is composed of three modalities of data, i.e., Sentinel-2 Imagery, WRF-HRRR Computed Dataset, and USDA Crop Dataset, spanning from 2017 to 2022 (i.e., 6 years) across 2291 U.S. counties, with its geographic distribution illustrated below. We also include the number of counties corresponding to each crop type in the USDA Crop Dataset (see the rightmost bar chart in the figure) since crop planting is highly geography-dependent.\n\n!Geographic Distribution", "### Sentinel-2 Imagery\n\nThe Sentinel-2 Imagery, obtained from the Sentinel-2 mission, provides high-resolution satellite images for monitoring crop growth on the ground. It contains two types of 224x224 RGB satellite images, agriculture imagery (AG) and normalized difference vegetation index (NDVI), both with a spatial resolution of 9x9 km, and a revisit frequency of 14 days. Examples of AG and NDVI images are depicted as follows.\n\n- Agriculture Imagery (AG)\n\n!AG\n\n- Normalized Difference Vegetation Index (NDVI)\n\n!NDVI", "### WRF-HRRR Computed Dataset\n\nThe WRF-HRRR Computed Dataset, sourced from the WRF-HRRR model, contains daily and monthly meteorological parameters, with the former and the latter designed for capturing direct effects of short-term growing season weather variations on crop growth, and for learning indirect impacts of long-term climate change on crop yields, respectively. It contains 9 meteorological parameters gridded at 9 km in a one-day (and one-month) interval. 
The figures show the temperature in the spring, the summer, the fall, and the winter, respectively.\n\n!HRRR Temperature", "### USDA Crop Dataset\n\nThe USDA Crop Dataset, collected from the USDA Quick Statistic website, offers valuable information, such as production, yield, etc., for crops grown at each available county. It offers crop information for four types of crops, i.e., corn, cotton, soybeans, and winter wheat, at a county-level basis, with a temporal resolution of one year. The figure illustrates the 2022 Corn Yield across the United States.\n\n!USDA Corn Yield", "### The CropNet Package\n\nBeyond the contribution of our CropNet dataset, we also release the CropNet package in the Python Package Index (PyPI) for facilitating researchers in downloading the CropNet data based on the time and region of interest, and flexibly building their deep learning models for accurate crop yield predictions. In particular, the CropNet package includes three types of APIs, listed as follows:\n\n- DataDownloader: This API allows users to download the CropNet data over the time/region of interest on the fly.\n\n- DataRetriever: With this API, users can conveniently obtain the CropNet data stored in the local machine (e.g., if you have downloaded our curated CropNet from Google Drive) over the time/region of interest.\n\n- DataLoader: This API is designed to facilitate researchers in developing their DNNs for accurate crop yield predictions. Specifically, the code in this API ( 1) combines all three modalities of data to create $(\\mathbf{x}, \\mathbf{y_{s}}, \\mathbf{y_{l}}, \\mathbf{z})$ tuples, with $\\mathbf{x}, \\mathbf{y_{s}}, \\mathbf{y_{l}}, \\text{and}~ \\mathbf{z}$, respectively representing satellite images, short-term daily whether parameters, long-term monthly meteorological parameters, and ground-truth crop yield (or production) information, and then (2) exposes those tuples via a 'Dataset' object after appropriate data pre-processing techniques.", "### Installation\n\nResearchers and practitioners can install the latest version of CropNet with the following commands:", "### CropNet API Examples\n\n- Example 1: A DataDownloader Example for Downloading the Up-to-date CropNet Data\n\n Given the time and region (i.e., the FIPS codes for two U.S. counties) of interest, the following code presents how to utilize the DataDownloader to download the up-to-date CropNet data:\n\n\n\n\n\n- Example 2: A DataRetriever Example for Obtaining Our Curated CropNet Data\n\n Given the time and region of interest, the following code shows how to use the DataRetriever to obtain the CropNet data stored in the local machine in a user-friendly format:\n\n\n\n\n\n- Example 3: A PyTorch Example for Using the DataLoader API for Training DNNs \n\nThe following code presents a PyTorch example of training a deep learning model (i.e., MMST-ViT) for climate change-aware crop yield predictions, by utilizing the DataLoader APIs:", "## License\n\nCropNet has a Creative Commons Attribution-NonCommercial 4.0 International (CC BY-NC 4.0) license." ]
[ "TAGS\n#size_categories-n>1T #language-English #license-cc-by-4.0 #climate #region-us \n", "# An Open and Large-Scale Dataset for Multi-Modal Climate Change-aware Crop Yield Predictions\n\n!Motivation\n\n\n\nThe CropNet dataset is an open, large-scale, and deep learning-ready dataset, specifically targeting climate change-aware crop yield predictions for the contiguous United States (U.S.) continent at the county level. It is composed of three modalities of data, i.e., Sentinel-2 Imagery, WRF-HRRR Computed Dataset, and USDA Crop Dataset, aligned in both the spatial and temporal domains, for over 2200 U.S. counties spanning 6 years (2017-2022). It is expected to facilitate researchers in developing deep learning models for timely and precisely predicting crop yields at the county level, by accounting for the effects of both short-term growing season weather variations and long-term climate change on crop yields. Although our initial goal of crafting the CropNet dataset is for precise crop yield prediction, we believe its future applicability is broad and can benefit the deep learning, agriculture, and meteorology communities, for exploring more interesting, critical, and climate change-related applications, by using one or more modalities of data.", "## Contributions", "#### The 'CropNet' dataset\n\n- The first *terabyte-sized*, publicly available, and multi-modal dataset for climate change-aware crop yield predictions\n- The 'CropNet' dataset is available at Google Drive", "#### The 'CropNet' package \n\n- A *deep learning-ready* Python package for facilitating researchers in downloading the CropNet data on the fly over the time and region of interest, and developing deep neural networks (DNNs) for climate change-aware crop yield predictions\n- The 'CropNet' package is available at Python Package Index (PyPI)", "## Tutorials\n\nThe tutorials for the CropNet dataset are available at Google Colab, with their links listed below\n\n- Sentinel-2 Imagery Tutorial\n- WRF-HRRR Computed Dataset Tutorial\n\n- USDA Crop Dataset Tutorial", "## The CropNet Dataset\n\n0ur CropNet dataset is composed of three modalities of data, i.e., Sentinel-2 Imagery, WRF-HRRR Computed Dataset, and USDA Crop Dataset, spanning from 2017 to 2022 (i.e., 6 years) across 2291 U.S. counties, with its geographic distribution illustrated below. We also include the number of counties corresponding to each crop type in the USDA Crop Dataset (see the rightmost bar chart in the figure) since crop planting is highly geography-dependent.\n\n!Geographic Distribution", "### Sentinel-2 Imagery\n\nThe Sentinel-2 Imagery, obtained from the Sentinel-2 mission, provides high-resolution satellite images for monitoring crop growth on the ground. It contains two types of 224x224 RGB satellite images, agriculture imagery (AG) and normalized difference vegetation index (NDVI), both with a spatial resolution of 9x9 km, and a revisit frequency of 14 days. Examples of AG and NDVI images are depicted as follows.\n\n- Agriculture Imagery (AG)\n\n!AG\n\n- Normalized Difference Vegetation Index (NDVI)\n\n!NDVI", "### WRF-HRRR Computed Dataset\n\nThe WRF-HRRR Computed Dataset, sourced from the WRF-HRRR model, contains daily and monthly meteorological parameters, with the former and the latter designed for capturing direct effects of short-term growing season weather variations on crop growth, and for learning indirect impacts of long-term climate change on crop yields, respectively. 
It contains 9 meteorological parameters gridded at 9 km in a one-day (and one-month) interval. The figures show the temperature in the spring, the summer, the fall, and the winter, respectively.\n\n!HRRR Temperature", "### USDA Crop Dataset\n\nThe USDA Crop Dataset, collected from the USDA Quick Statistic website, offers valuable information, such as production, yield, etc., for crops grown at each available county. It offers crop information for four types of crops, i.e., corn, cotton, soybeans, and winter wheat, at a county-level basis, with a temporal resolution of one year. The figure illustrates the 2022 Corn Yield across the United States.\n\n!USDA Corn Yield", "### The CropNet Package\n\nBeyond the contribution of our CropNet dataset, we also release the CropNet package in the Python Package Index (PyPI) for facilitating researchers in downloading the CropNet data based on the time and region of interest, and flexibly building their deep learning models for accurate crop yield predictions. In particular, the CropNet package includes three types of APIs, listed as follows:\n\n- DataDownloader: This API allows users to download the CropNet data over the time/region of interest on the fly.\n\n- DataRetriever: With this API, users can conveniently obtain the CropNet data stored in the local machine (e.g., if you have downloaded our curated CropNet from Google Drive) over the time/region of interest.\n\n- DataLoader: This API is designed to facilitate researchers in developing their DNNs for accurate crop yield predictions. Specifically, the code in this API ( 1) combines all three modalities of data to create $(\\mathbf{x}, \\mathbf{y_{s}}, \\mathbf{y_{l}}, \\mathbf{z})$ tuples, with $\\mathbf{x}, \\mathbf{y_{s}}, \\mathbf{y_{l}}, \\text{and}~ \\mathbf{z}$, respectively representing satellite images, short-term daily whether parameters, long-term monthly meteorological parameters, and ground-truth crop yield (or production) information, and then (2) exposes those tuples via a 'Dataset' object after appropriate data pre-processing techniques.", "### Installation\n\nResearchers and practitioners can install the latest version of CropNet with the following commands:", "### CropNet API Examples\n\n- Example 1: A DataDownloader Example for Downloading the Up-to-date CropNet Data\n\n Given the time and region (i.e., the FIPS codes for two U.S. counties) of interest, the following code presents how to utilize the DataDownloader to download the up-to-date CropNet data:\n\n\n\n\n\n- Example 2: A DataRetriever Example for Obtaining Our Curated CropNet Data\n\n Given the time and region of interest, the following code shows how to use the DataRetriever to obtain the CropNet data stored in the local machine in a user-friendly format:\n\n\n\n\n\n- Example 3: A PyTorch Example for Using the DataLoader API for Training DNNs \n\nThe following code presents a PyTorch example of training a deep learning model (i.e., MMST-ViT) for climate change-aware crop yield predictions, by utilizing the DataLoader APIs:", "## License\n\nCropNet has a Creative Commons Attribution-NonCommercial 4.0 International (CC BY-NC 4.0) license." ]
[ 33, 292, 4, 58, 88, 51, 139, 139, 147, 120, 384, 25, 222, 26 ]
[ "passage: TAGS\n#size_categories-n>1T #language-English #license-cc-by-4.0 #climate #region-us \n# An Open and Large-Scale Dataset for Multi-Modal Climate Change-aware Crop Yield Predictions\n\n!Motivation\n\n\n\nThe CropNet dataset is an open, large-scale, and deep learning-ready dataset, specifically targeting climate change-aware crop yield predictions for the contiguous United States (U.S.) continent at the county level. It is composed of three modalities of data, i.e., Sentinel-2 Imagery, WRF-HRRR Computed Dataset, and USDA Crop Dataset, aligned in both the spatial and temporal domains, for over 2200 U.S. counties spanning 6 years (2017-2022). It is expected to facilitate researchers in developing deep learning models for timely and precisely predicting crop yields at the county level, by accounting for the effects of both short-term growing season weather variations and long-term climate change on crop yields. Although our initial goal of crafting the CropNet dataset is for precise crop yield prediction, we believe its future applicability is broad and can benefit the deep learning, agriculture, and meteorology communities, for exploring more interesting, critical, and climate change-related applications, by using one or more modalities of data.## Contributions#### The 'CropNet' dataset\n\n- The first *terabyte-sized*, publicly available, and multi-modal dataset for climate change-aware crop yield predictions\n- The 'CropNet' dataset is available at Google Drive#### The 'CropNet' package \n\n- A *deep learning-ready* Python package for facilitating researchers in downloading the CropNet data on the fly over the time and region of interest, and developing deep neural networks (DNNs) for climate change-aware crop yield predictions\n- The 'CropNet' package is available at Python Package Index (PyPI)", "passage: ## Tutorials\n\nThe tutorials for the CropNet dataset are available at Google Colab, with their links listed below\n\n- Sentinel-2 Imagery Tutorial\n- WRF-HRRR Computed Dataset Tutorial\n\n- USDA Crop Dataset Tutorial## The CropNet Dataset\n\n0ur CropNet dataset is composed of three modalities of data, i.e., Sentinel-2 Imagery, WRF-HRRR Computed Dataset, and USDA Crop Dataset, spanning from 2017 to 2022 (i.e., 6 years) across 2291 U.S. counties, with its geographic distribution illustrated below. We also include the number of counties corresponding to each crop type in the USDA Crop Dataset (see the rightmost bar chart in the figure) since crop planting is highly geography-dependent.\n\n!Geographic Distribution### Sentinel-2 Imagery\n\nThe Sentinel-2 Imagery, obtained from the Sentinel-2 mission, provides high-resolution satellite images for monitoring crop growth on the ground. It contains two types of 224x224 RGB satellite images, agriculture imagery (AG) and normalized difference vegetation index (NDVI), both with a spatial resolution of 9x9 km, and a revisit frequency of 14 days. Examples of AG and NDVI images are depicted as follows.\n\n- Agriculture Imagery (AG)\n\n!AG\n\n- Normalized Difference Vegetation Index (NDVI)\n\n!NDVI### WRF-HRRR Computed Dataset\n\nThe WRF-HRRR Computed Dataset, sourced from the WRF-HRRR model, contains daily and monthly meteorological parameters, with the former and the latter designed for capturing direct effects of short-term growing season weather variations on crop growth, and for learning indirect impacts of long-term climate change on crop yields, respectively. 
It contains 9 meteorological parameters gridded at 9 km in a one-day (and one-month) interval. The figures show the temperature in the spring, the summer, the fall, and the winter, respectively.\n\n!HRRR Temperature", "passage: ### USDA Crop Dataset\n\nThe USDA Crop Dataset, collected from the USDA Quick Statistic website, offers valuable information, such as production, yield, etc., for crops grown at each available county. It offers crop information for four types of crops, i.e., corn, cotton, soybeans, and winter wheat, at a county-level basis, with a temporal resolution of one year. The figure illustrates the 2022 Corn Yield across the United States.\n\n!USDA Corn Yield### The CropNet Package\n\nBeyond the contribution of our CropNet dataset, we also release the CropNet package in the Python Package Index (PyPI) for facilitating researchers in downloading the CropNet data based on the time and region of interest, and flexibly building their deep learning models for accurate crop yield predictions. In particular, the CropNet package includes three types of APIs, listed as follows:\n\n- DataDownloader: This API allows users to download the CropNet data over the time/region of interest on the fly.\n\n- DataRetriever: With this API, users can conveniently obtain the CropNet data stored in the local machine (e.g., if you have downloaded our curated CropNet from Google Drive) over the time/region of interest.\n\n- DataLoader: This API is designed to facilitate researchers in developing their DNNs for accurate crop yield predictions. Specifically, the code in this API ( 1) combines all three modalities of data to create $(\\mathbf{x}, \\mathbf{y_{s}}, \\mathbf{y_{l}}, \\mathbf{z})$ tuples, with $\\mathbf{x}, \\mathbf{y_{s}}, \\mathbf{y_{l}}, \\text{and}~ \\mathbf{z}$, respectively representing satellite images, short-term daily whether parameters, long-term monthly meteorological parameters, and ground-truth crop yield (or production) information, and then (2) exposes those tuples via a 'Dataset' object after appropriate data pre-processing techniques.### Installation\n\nResearchers and practitioners can install the latest version of CropNet with the following commands:" ]
f5e5dd5cf4b8b64cdc01123954b8b3c0e523a3c8
# Dataset Card for Evaluation run of LeoLM/leo-hessianai-13b

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/LeoLM/leo-hessianai-13b
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [LeoLM/leo-hessianai-13b](https://huggingface.co/LeoLM/leo-hessianai-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_LeoLM__leo-hessianai-13b",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-23T18:16:50.877675](https://huggingface.co/datasets/open-llm-leaderboard/details_LeoLM__leo-hessianai-13b/blob/main/results_2023-10-23T18-16-50.877675.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.001363255033557047,
        "em_stderr": 0.0003778609196460785,
        "f1": 0.05912332214765112,
        "f1_stderr": 0.001345589828621863,
        "acc": 0.425157060340252,
        "acc_stderr": 0.00992506244739182
    },
    "harness|drop|3": {
        "em": 0.001363255033557047,
        "em_stderr": 0.0003778609196460785,
        "f1": 0.05912332214765112,
        "f1_stderr": 0.001345589828621863
    },
    "harness|gsm8k|5": {
        "acc": 0.08946171341925702,
        "acc_stderr": 0.007861583049939733
    },
    "harness|winogrande|5": {
        "acc": 0.760852407261247,
        "acc_stderr": 0.011988541844843905
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
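As a small companion to the loading snippet above, here is a sketch for pulling the most recent details of a single task and inspecting them before any aggregation. The config name `harness_gsm8k_5` and the `latest` split are taken from this card's own configuration list; the per-row column layout is not documented here, so the sketch only inspects it rather than assuming column names:

```python
from datasets import load_dataset

# Config and split names come from this card's config list.
details = load_dataset(
    "open-llm-leaderboard/details_LeoLM__leo-hessianai-13b",
    "harness_gsm8k_5",
    split="latest",
)

print(details)            # number of rows and column names
print(details[0].keys())  # inspect one row's fields before aggregating
```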
open-llm-leaderboard/details_LeoLM__leo-hessianai-13b
[ "region:us" ]
2023-10-08T16:59:54+00:00
{"pretty_name": "Evaluation run of LeoLM/leo-hessianai-13b", "dataset_summary": "Dataset automatically created during the evaluation run of model [LeoLM/leo-hessianai-13b](https://huggingface.co/LeoLM/leo-hessianai-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_LeoLM__leo-hessianai-13b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-23T18:16:50.877675](https://huggingface.co/datasets/open-llm-leaderboard/details_LeoLM__leo-hessianai-13b/blob/main/results_2023-10-23T18-16-50.877675.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.001363255033557047,\n \"em_stderr\": 0.0003778609196460785,\n \"f1\": 0.05912332214765112,\n \"f1_stderr\": 0.001345589828621863,\n \"acc\": 0.425157060340252,\n \"acc_stderr\": 0.00992506244739182\n },\n \"harness|drop|3\": {\n \"em\": 0.001363255033557047,\n \"em_stderr\": 0.0003778609196460785,\n \"f1\": 0.05912332214765112,\n \"f1_stderr\": 0.001345589828621863\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.08946171341925702,\n \"acc_stderr\": 0.007861583049939733\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.760852407261247,\n \"acc_stderr\": 0.011988541844843905\n }\n}\n```", "repo_url": "https://huggingface.co/LeoLM/leo-hessianai-13b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|arc:challenge|25_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_23T18_16_50.877675", "path": ["**/details_harness|drop|3_2023-10-23T18-16-50.877675.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-23T18-16-50.877675.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_23T18_16_50.877675", "path": ["**/details_harness|gsm8k|5_2023-10-23T18-16-50.877675.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-23T18-16-50.877675.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hellaswag|10_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": 
"2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T17-59-31.182651.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T17-59-31.182651.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T17-59-31.182651.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T17-59-31.182651.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T17-59-31.182651.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_23T18_16_50.877675", "path": ["**/details_harness|winogrande|5_2023-10-23T18-16-50.877675.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-23T18-16-50.877675.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_08T17_59_31.182651", "path": ["results_2023-10-08T17-59-31.182651.parquet"]}, {"split": "2023_10_23T18_16_50.877675", "path": ["results_2023-10-23T18-16-50.877675.parquet"]}, {"split": "latest", "path": ["results_2023-10-23T18-16-50.877675.parquet"]}]}]}
2023-10-23T17:17:03+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of LeoLM/leo-hessianai-13b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model LeoLM/leo-hessianai-13b on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-23T18:16:50.877675 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
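The flattened card above says "you can for instance do the following:" but the plain-text rendering drops the accompanying code block. A minimal sketch of the intended call is below; the configuration name and the "latest" split are taken from the metadata earlier in this record, while the repository id is again an assumption inferred from the `details_<org>__<model>` naming pattern:

```python
from datasets import load_dataset

# Load the 5-shot Winogrande details; the "latest" split points to the
# most recent of the two evaluation runs recorded for this model.
data = load_dataset(
    "open-llm-leaderboard/details_LeoLM__leo-hessianai-13b",  # inferred repo id
    "harness_winogrande_5",
    split="latest",
)
print(data[0])
```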
[ "# Dataset Card for Evaluation run of LeoLM/leo-hessianai-13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model LeoLM/leo-hessianai-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T18:16:50.877675(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of LeoLM/leo-hessianai-13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model LeoLM/leo-hessianai-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T18:16:50.877675(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 19, 31, 167, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of LeoLM/leo-hessianai-13b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model LeoLM/leo-hessianai-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-23T18:16:50.877675(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
4cc587281fe2fb2168f53c83fa14bfcb16dd0b09
# Dataset Card for Evaluation run of Lazycuber/L2-7b-Guanaco-Random-Test

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/Lazycuber/L2-7b-Guanaco-Random-Test
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [Lazycuber/L2-7b-Guanaco-Random-Test](https://huggingface.co/Lazycuber/L2-7b-Guanaco-Random-Test) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_Lazycuber__L2-7b-Guanaco-Random-Test",
    "harness_truthfulqa_mc_0",
    split="train")
```

## Latest results

These are the [latest results from run 2023-10-08T18:13:47.081600](https://huggingface.co/datasets/open-llm-leaderboard/details_Lazycuber__L2-7b-Guanaco-Random-Test/blob/main/results_2023-10-08T18-13-47.081600.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": { "acc": 0.47820349788584665, "acc_stderr": 0.03520803674350638, "acc_norm": 0.4820937504834085, "acc_norm_stderr": 0.03519557788566828, "mc1": 0.27906976744186046, "mc1_stderr": 0.0157021070906279, "mc2": 0.4232640996589444, "mc2_stderr": 0.01477991946603906 },
    "harness|arc:challenge|25": { "acc": 0.4761092150170648, "acc_stderr": 0.014594701798071654, "acc_norm": 0.5059726962457338, "acc_norm_stderr": 0.014610348300255795 },
    "harness|hellaswag|10": { "acc": 0.5723959370643298, "acc_stderr": 0.004937199759947679, "acc_norm": 0.7720573590918144, "acc_norm_stderr": 0.004186480645315568 },
    "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 },
    "harness|hendrycksTest-anatomy|5": { "acc": 0.43703703703703706, "acc_stderr": 0.042849586397533994, "acc_norm": 0.43703703703703706, "acc_norm_stderr": 0.042849586397533994 },
    "harness|hendrycksTest-astronomy|5": { "acc": 0.5131578947368421, "acc_stderr": 0.04067533136309173, "acc_norm": 0.5131578947368421, "acc_norm_stderr": 0.04067533136309173 },
    "harness|hendrycksTest-business_ethics|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 },
    "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5169811320754717, "acc_stderr": 0.030755120364119905, "acc_norm": 0.5169811320754717, "acc_norm_stderr": 0.030755120364119905 },
    "harness|hendrycksTest-college_biology|5": { "acc": 0.5138888888888888, "acc_stderr": 0.041795966175810016, "acc_norm": 0.5138888888888888, "acc_norm_stderr": 0.041795966175810016 },
    "harness|hendrycksTest-college_chemistry|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 },
    "harness|hendrycksTest-college_computer_science|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 },
    "harness|hendrycksTest-college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 },
    "harness|hendrycksTest-college_medicine|5": { "acc": 0.3699421965317919, "acc_stderr": 0.036812296333943194, "acc_norm": 0.3699421965317919, "acc_norm_stderr": 0.036812296333943194 },
    "harness|hendrycksTest-college_physics|5": { "acc": 0.24509803921568626, "acc_stderr": 0.042801058373643966, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.042801058373643966 },
    "harness|hendrycksTest-computer_security|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 },
    "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.425531914893617, "acc_stderr": 0.03232146916224469, "acc_norm": 0.425531914893617, "acc_norm_stderr": 0.03232146916224469 },
    "harness|hendrycksTest-econometrics|5": { "acc": 0.38596491228070173, "acc_stderr": 0.04579639422070434, "acc_norm": 0.38596491228070173, "acc_norm_stderr": 0.04579639422070434 },
    "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.503448275862069, "acc_stderr": 0.041665675771015785, "acc_norm": 0.503448275862069, "acc_norm_stderr": 0.041665675771015785 },
    "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.31216931216931215, "acc_stderr": 0.0238652068369726, "acc_norm": 0.31216931216931215, "acc_norm_stderr": 0.0238652068369726 },
    "harness|hendrycksTest-formal_logic|5": { "acc": 0.24603174603174602, "acc_stderr": 0.038522733649243156, "acc_norm": 0.24603174603174602, "acc_norm_stderr": 0.038522733649243156 },
    "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 },
    "harness|hendrycksTest-high_school_biology|5": { "acc": 0.5290322580645161, "acc_stderr": 0.028396016402761005, "acc_norm": 0.5290322580645161, "acc_norm_stderr": 0.028396016402761005 },
    "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.3793103448275862, "acc_stderr": 0.03413963805906235, "acc_norm": 0.3793103448275862, "acc_norm_stderr": 0.03413963805906235 },
    "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 },
    "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.5818181818181818, "acc_stderr": 0.03851716319398395, "acc_norm": 0.5818181818181818, "acc_norm_stderr": 0.03851716319398395 },
    "harness|hendrycksTest-high_school_geography|5": { "acc": 0.5909090909090909, "acc_stderr": 0.03502975799413007, "acc_norm": 0.5909090909090909, "acc_norm_stderr": 0.03502975799413007 },
    "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.6683937823834197, "acc_stderr": 0.03397636541089118, "acc_norm": 0.6683937823834197, "acc_norm_stderr": 0.03397636541089118 },
    "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.4128205128205128, "acc_stderr": 0.024962683564331803, "acc_norm": 0.4128205128205128, "acc_norm_stderr": 0.024962683564331803 },
    "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2740740740740741, "acc_stderr": 0.027195934804085626, "acc_norm": 0.2740740740740741, "acc_norm_stderr": 0.027195934804085626 },
    "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.3907563025210084, "acc_stderr": 0.031693802357129965, "acc_norm": 0.3907563025210084, "acc_norm_stderr": 0.031693802357129965 },
    "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31788079470198677, "acc_stderr": 0.03802039760107903, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.03802039760107903 },
    "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.6642201834862386, "acc_stderr": 0.020248081396752927, "acc_norm": 0.6642201834862386, "acc_norm_stderr": 0.020248081396752927 },
    "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.2962962962962963, "acc_stderr": 0.031141447823536016, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.031141447823536016 },
    "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.6421568627450981, "acc_stderr": 0.03364487286088298, "acc_norm": 0.6421568627450981, "acc_norm_stderr": 0.03364487286088298 },
    "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.6286919831223629, "acc_stderr": 0.0314506860074486, "acc_norm": 0.6286919831223629, "acc_norm_stderr": 0.0314506860074486 },
    "harness|hendrycksTest-human_aging|5": { "acc": 0.5560538116591929, "acc_stderr": 0.03334625674242728, "acc_norm": 0.5560538116591929, "acc_norm_stderr": 0.03334625674242728 },
    "harness|hendrycksTest-human_sexuality|5": { "acc": 0.5572519083969466, "acc_stderr": 0.04356447202665069, "acc_norm": 0.5572519083969466, "acc_norm_stderr": 0.04356447202665069 },
    "harness|hendrycksTest-international_law|5": { "acc": 0.6694214876033058, "acc_stderr": 0.04294340845212093, "acc_norm": 0.6694214876033058, "acc_norm_stderr": 0.04294340845212093 },
    "harness|hendrycksTest-jurisprudence|5": { "acc": 0.5925925925925926, "acc_stderr": 0.04750077341199984, "acc_norm": 0.5925925925925926, "acc_norm_stderr": 0.04750077341199984 },
    "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.5153374233128835, "acc_stderr": 0.03926522378708843, "acc_norm": 0.5153374233128835, "acc_norm_stderr": 0.03926522378708843 },
    "harness|hendrycksTest-machine_learning|5": { "acc": 0.35714285714285715, "acc_stderr": 0.04547960999764376, "acc_norm": 0.35714285714285715, "acc_norm_stderr": 0.04547960999764376 },
    "harness|hendrycksTest-management|5": { "acc": 0.6407766990291263, "acc_stderr": 0.047504583990416946, "acc_norm": 0.6407766990291263, "acc_norm_stderr": 0.047504583990416946 },
    "harness|hendrycksTest-marketing|5": { "acc": 0.7307692307692307, "acc_stderr": 0.029058588303748842, "acc_norm": 0.7307692307692307, "acc_norm_stderr": 0.029058588303748842 },
    "harness|hendrycksTest-medical_genetics|5": { "acc": 0.51, "acc_stderr": 0.05024183937956913, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956913 },
    "harness|hendrycksTest-miscellaneous|5": { "acc": 0.6845466155810983, "acc_stderr": 0.016617501738763387, "acc_norm": 0.6845466155810983, "acc_norm_stderr": 0.016617501738763387 },
    "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5260115606936416, "acc_stderr": 0.02688264343402289, "acc_norm": 0.5260115606936416, "acc_norm_stderr": 0.02688264343402289 },
    "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.22681564245810057, "acc_stderr": 0.014005843570897895, "acc_norm": 0.22681564245810057, "acc_norm_stderr": 0.014005843570897895 },
    "harness|hendrycksTest-nutrition|5": { "acc": 0.5392156862745098, "acc_stderr": 0.028541722692618874, "acc_norm": 0.5392156862745098, "acc_norm_stderr": 0.028541722692618874 },
    "harness|hendrycksTest-philosophy|5": { "acc": 0.5466237942122186, "acc_stderr": 0.02827435985489426, "acc_norm": 0.5466237942122186, "acc_norm_stderr": 0.02827435985489426 },
    "harness|hendrycksTest-prehistory|5": { "acc": 0.558641975308642, "acc_stderr": 0.027628737155668763, "acc_norm": 0.558641975308642, "acc_norm_stderr": 0.027628737155668763 },
    "harness|hendrycksTest-professional_accounting|5": { "acc": 0.3617021276595745, "acc_stderr": 0.028663820147199495, "acc_norm": 0.3617021276595745, "acc_norm_stderr": 0.028663820147199495 },
    "harness|hendrycksTest-professional_law|5": { "acc": 0.32790091264667537, "acc_stderr": 0.011989936640666525, "acc_norm": 0.32790091264667537, "acc_norm_stderr": 0.011989936640666525 },
    "harness|hendrycksTest-professional_medicine|5": { "acc": 0.39705882352941174, "acc_stderr": 0.029722152099280065, "acc_norm": 0.39705882352941174, "acc_norm_stderr": 0.029722152099280065 },
    "harness|hendrycksTest-professional_psychology|5": { "acc": 0.46895424836601307, "acc_stderr": 0.020188804456361883, "acc_norm": 0.46895424836601307, "acc_norm_stderr": 0.020188804456361883 },
    "harness|hendrycksTest-public_relations|5": { "acc": 0.509090909090909, "acc_stderr": 0.0478833976870286, "acc_norm": 0.509090909090909, "acc_norm_stderr": 0.0478833976870286 },
    "harness|hendrycksTest-security_studies|5": { "acc": 0.5224489795918368, "acc_stderr": 0.03197694118713672, "acc_norm": 0.5224489795918368, "acc_norm_stderr": 0.03197694118713672 },
    "harness|hendrycksTest-sociology|5": { "acc": 0.6218905472636815, "acc_stderr": 0.034288678487786564, "acc_norm": 0.6218905472636815, "acc_norm_stderr": 0.034288678487786564 },
    "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.66, "acc_stderr": 0.04760952285695237, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695237 },
    "harness|hendrycksTest-virology|5": { "acc": 0.43373493975903615, "acc_stderr": 0.03858158940685517, "acc_norm": 0.43373493975903615, "acc_norm_stderr": 0.03858158940685517 },
    "harness|hendrycksTest-world_religions|5": { "acc": 0.695906432748538, "acc_stderr": 0.0352821125824523, "acc_norm": 0.695906432748538, "acc_norm_stderr": 0.0352821125824523 },
    "harness|truthfulqa:mc|0": { "mc1": 0.27906976744186046, "mc1_stderr": 0.0157021070906279, "mc2": 0.4232640996589444, "mc2_stderr": 0.01477991946603906 }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
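To go from the card above to the numbers in its "Latest results" block programmatically, one option is to enumerate the configurations and then load the aggregated "results" configuration. A minimal sketch follows; it assumes network access to the Hub and that this repository's "results" configuration exposes a "latest" split in the same layout as the LeoLM record earlier in this dump:

```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_Lazycuber__L2-7b-Guanaco-Random-Test"

# One configuration per evaluated task, plus the aggregated "results" config.
configs = get_dataset_config_names(repo)
print(len(configs), configs[:5])

# The "latest" split of the "results" config holds the aggregated metrics
# shown in the "Latest results" section above.
results = load_dataset(repo, "results", split="latest")
print(results[0])
```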
open-llm-leaderboard/details_Lazycuber__L2-7b-Guanaco-Random-Test
[ "region:us" ]
2023-10-08T17:14:11+00:00
{"pretty_name": "Evaluation run of Lazycuber/L2-7b-Guanaco-Random-Test", "dataset_summary": "Dataset automatically created during the evaluation run of model [Lazycuber/L2-7b-Guanaco-Random-Test](https://huggingface.co/Lazycuber/L2-7b-Guanaco-Random-Test) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Lazycuber__L2-7b-Guanaco-Random-Test\",\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-08T18:13:47.081600](https://huggingface.co/datasets/open-llm-leaderboard/details_Lazycuber__L2-7b-Guanaco-Random-Test/blob/main/results_2023-10-08T18-13-47.081600.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.47820349788584665,\n \"acc_stderr\": 0.03520803674350638,\n \"acc_norm\": 0.4820937504834085,\n \"acc_norm_stderr\": 0.03519557788566828,\n \"mc1\": 0.27906976744186046,\n \"mc1_stderr\": 0.0157021070906279,\n \"mc2\": 0.4232640996589444,\n \"mc2_stderr\": 0.01477991946603906\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.4761092150170648,\n \"acc_stderr\": 0.014594701798071654,\n \"acc_norm\": 0.5059726962457338,\n \"acc_norm_stderr\": 0.014610348300255795\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5723959370643298,\n \"acc_stderr\": 0.004937199759947679,\n \"acc_norm\": 0.7720573590918144,\n \"acc_norm_stderr\": 0.004186480645315568\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.43703703703703706,\n \"acc_stderr\": 0.042849586397533994,\n \"acc_norm\": 0.43703703703703706,\n \"acc_norm_stderr\": 0.042849586397533994\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.5131578947368421,\n \"acc_stderr\": 0.04067533136309173,\n \"acc_norm\": 0.5131578947368421,\n \"acc_norm_stderr\": 0.04067533136309173\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5169811320754717,\n \"acc_stderr\": 0.030755120364119905,\n \"acc_norm\": 0.5169811320754717,\n \"acc_norm_stderr\": 0.030755120364119905\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5138888888888888,\n \"acc_stderr\": 0.041795966175810016,\n \"acc_norm\": 0.5138888888888888,\n \"acc_norm_stderr\": 0.041795966175810016\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.28,\n 
\"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.3699421965317919,\n \"acc_stderr\": 0.036812296333943194,\n \"acc_norm\": 0.3699421965317919,\n \"acc_norm_stderr\": 0.036812296333943194\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.24509803921568626,\n \"acc_stderr\": 0.042801058373643966,\n \"acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.042801058373643966\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.425531914893617,\n \"acc_stderr\": 0.03232146916224469,\n \"acc_norm\": 0.425531914893617,\n \"acc_norm_stderr\": 0.03232146916224469\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.38596491228070173,\n \"acc_stderr\": 0.04579639422070434,\n \"acc_norm\": 0.38596491228070173,\n \"acc_norm_stderr\": 0.04579639422070434\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.503448275862069,\n \"acc_stderr\": 0.041665675771015785,\n \"acc_norm\": 0.503448275862069,\n \"acc_norm_stderr\": 0.041665675771015785\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.31216931216931215,\n \"acc_stderr\": 0.0238652068369726,\n \"acc_norm\": 0.31216931216931215,\n \"acc_norm_stderr\": 0.0238652068369726\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.24603174603174602,\n \"acc_stderr\": 0.038522733649243156,\n \"acc_norm\": 0.24603174603174602,\n \"acc_norm_stderr\": 0.038522733649243156\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.5290322580645161,\n \"acc_stderr\": 0.028396016402761005,\n \"acc_norm\": 0.5290322580645161,\n \"acc_norm_stderr\": 0.028396016402761005\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.3793103448275862,\n \"acc_stderr\": 0.03413963805906235,\n \"acc_norm\": 0.3793103448275862,\n \"acc_norm_stderr\": 0.03413963805906235\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.5818181818181818,\n \"acc_stderr\": 0.03851716319398395,\n \"acc_norm\": 0.5818181818181818,\n \"acc_norm_stderr\": 0.03851716319398395\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.5909090909090909,\n \"acc_stderr\": 0.03502975799413007,\n \"acc_norm\": 0.5909090909090909,\n \"acc_norm_stderr\": 0.03502975799413007\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.6683937823834197,\n \"acc_stderr\": 0.03397636541089118,\n \"acc_norm\": 0.6683937823834197,\n \"acc_norm_stderr\": 0.03397636541089118\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.4128205128205128,\n \"acc_stderr\": 0.024962683564331803,\n \"acc_norm\": 0.4128205128205128,\n \"acc_norm_stderr\": 0.024962683564331803\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2740740740740741,\n \"acc_stderr\": 0.027195934804085626,\n \"acc_norm\": 0.2740740740740741,\n \"acc_norm_stderr\": 0.027195934804085626\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.3907563025210084,\n \"acc_stderr\": 0.031693802357129965,\n \"acc_norm\": 0.3907563025210084,\n \"acc_norm_stderr\": 0.031693802357129965\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31788079470198677,\n \"acc_stderr\": 0.03802039760107903,\n \"acc_norm\": 0.31788079470198677,\n \"acc_norm_stderr\": 0.03802039760107903\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.6642201834862386,\n \"acc_stderr\": 0.020248081396752927,\n \"acc_norm\": 0.6642201834862386,\n \"acc_norm_stderr\": 0.020248081396752927\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.2962962962962963,\n \"acc_stderr\": 0.031141447823536016,\n \"acc_norm\": 0.2962962962962963,\n \"acc_norm_stderr\": 0.031141447823536016\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.6421568627450981,\n \"acc_stderr\": 0.03364487286088298,\n \"acc_norm\": 0.6421568627450981,\n \"acc_norm_stderr\": 0.03364487286088298\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.6286919831223629,\n \"acc_stderr\": 0.0314506860074486,\n \"acc_norm\": 0.6286919831223629,\n \"acc_norm_stderr\": 0.0314506860074486\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.5560538116591929,\n \"acc_stderr\": 0.03334625674242728,\n \"acc_norm\": 0.5560538116591929,\n \"acc_norm_stderr\": 0.03334625674242728\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.5572519083969466,\n \"acc_stderr\": 0.04356447202665069,\n \"acc_norm\": 0.5572519083969466,\n \"acc_norm_stderr\": 0.04356447202665069\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.6694214876033058,\n \"acc_stderr\": 0.04294340845212093,\n \"acc_norm\": 0.6694214876033058,\n \"acc_norm_stderr\": 0.04294340845212093\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.5925925925925926,\n \"acc_stderr\": 0.04750077341199984,\n \"acc_norm\": 0.5925925925925926,\n \"acc_norm_stderr\": 0.04750077341199984\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.5153374233128835,\n \"acc_stderr\": 0.03926522378708843,\n \"acc_norm\": 0.5153374233128835,\n \"acc_norm_stderr\": 0.03926522378708843\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.35714285714285715,\n \"acc_stderr\": 0.04547960999764376,\n \"acc_norm\": 0.35714285714285715,\n \"acc_norm_stderr\": 0.04547960999764376\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.6407766990291263,\n \"acc_stderr\": 0.047504583990416946,\n \"acc_norm\": 0.6407766990291263,\n \"acc_norm_stderr\": 0.047504583990416946\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7307692307692307,\n \"acc_stderr\": 0.029058588303748842,\n \"acc_norm\": 0.7307692307692307,\n \"acc_norm_stderr\": 0.029058588303748842\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956913,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956913\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.6845466155810983,\n \"acc_stderr\": 0.016617501738763387,\n \"acc_norm\": 0.6845466155810983,\n \"acc_norm_stderr\": 0.016617501738763387\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.5260115606936416,\n \"acc_stderr\": 0.02688264343402289,\n \"acc_norm\": 0.5260115606936416,\n \"acc_norm_stderr\": 0.02688264343402289\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.22681564245810057,\n \"acc_stderr\": 0.014005843570897895,\n \"acc_norm\": 0.22681564245810057,\n \"acc_norm_stderr\": 0.014005843570897895\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.5392156862745098,\n \"acc_stderr\": 0.028541722692618874,\n \"acc_norm\": 0.5392156862745098,\n \"acc_norm_stderr\": 0.028541722692618874\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.5466237942122186,\n \"acc_stderr\": 0.02827435985489426,\n \"acc_norm\": 0.5466237942122186,\n \"acc_norm_stderr\": 0.02827435985489426\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.558641975308642,\n \"acc_stderr\": 0.027628737155668763,\n \"acc_norm\": 0.558641975308642,\n \"acc_norm_stderr\": 0.027628737155668763\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.3617021276595745,\n \"acc_stderr\": 0.028663820147199495,\n \"acc_norm\": 0.3617021276595745,\n \"acc_norm_stderr\": 0.028663820147199495\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.32790091264667537,\n \"acc_stderr\": 0.011989936640666525,\n \"acc_norm\": 0.32790091264667537,\n \"acc_norm_stderr\": 0.011989936640666525\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.39705882352941174,\n \"acc_stderr\": 0.029722152099280065,\n \"acc_norm\": 0.39705882352941174,\n \"acc_norm_stderr\": 0.029722152099280065\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.46895424836601307,\n \"acc_stderr\": 0.020188804456361883,\n \"acc_norm\": 0.46895424836601307,\n \"acc_norm_stderr\": 0.020188804456361883\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.509090909090909,\n \"acc_stderr\": 0.0478833976870286,\n \"acc_norm\": 0.509090909090909,\n \"acc_norm_stderr\": 0.0478833976870286\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.5224489795918368,\n \"acc_stderr\": 0.03197694118713672,\n \"acc_norm\": 0.5224489795918368,\n \"acc_norm_stderr\": 0.03197694118713672\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6218905472636815,\n \"acc_stderr\": 0.034288678487786564,\n \"acc_norm\": 0.6218905472636815,\n \"acc_norm_stderr\": 0.034288678487786564\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.66,\n \"acc_stderr\": 0.04760952285695237,\n \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 0.04760952285695237\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.43373493975903615,\n \"acc_stderr\": 0.03858158940685517,\n \"acc_norm\": 0.43373493975903615,\n \"acc_norm_stderr\": 0.03858158940685517\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.695906432748538,\n \"acc_stderr\": 0.0352821125824523,\n \"acc_norm\": 0.695906432748538,\n \"acc_norm_stderr\": 0.0352821125824523\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.27906976744186046,\n \"mc1_stderr\": 0.0157021070906279,\n \"mc2\": 0.4232640996589444,\n \"mc2_stderr\": 0.01477991946603906\n }\n}\n```", "repo_url": "https://huggingface.co/Lazycuber/L2-7b-Guanaco-Random-Test", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email 
protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|arc:challenge|25_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hellaswag|10_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T18-13-47.081600.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T18-13-47.081600.parquet", 
"**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T18-13-47.081600.parquet", 
"**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T18-13-47.081600.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T18-13-47.081600.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_08T18_13_47.081600", "path": ["results_2023-10-08T18-13-47.081600.parquet"]}, {"split": "latest", "path": ["results_2023-10-08T18-13-47.081600.parquet"]}]}]}
2023-10-08T17:15:11+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Lazycuber/L2-7b-Guanaco-Random-Test ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model Lazycuber/L2-7b-Guanaco-Random-Test on the Open LLM Leaderboard. The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-08T18:13:47.081600 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each of them in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
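The summary above refers to a loading snippet that does not survive in this flattened text. A minimal sketch of the call it describes, using the `harness_arc_challenge_25` config named in this record's metadata and the same assumed dataset id as above:

```python
from datasets import load_dataset

# Dataset id assumed from the leaderboard naming convention (see note above);
# "harness_arc_challenge_25" is taken from this record's config list.
data = load_dataset(
    "open-llm-leaderboard/details_Lazycuber__L2-7b-Guanaco-Random-Test",
    "harness_arc_challenge_25",  # one config per evaluated task
    split="latest",              # "latest" always tracks the newest run
)
print(data)
```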
[ "# Dataset Card for Evaluation run of Lazycuber/L2-7b-Guanaco-Random-Test", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Lazycuber/L2-7b-Guanaco-Random-Test on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-08T18:13:47.081600(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Lazycuber/L2-7b-Guanaco-Random-Test", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Lazycuber/L2-7b-Guanaco-Random-Test on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-08T18:13:47.081600(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 27, 31, 175, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Lazycuber/L2-7b-Guanaco-Random-Test## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model Lazycuber/L2-7b-Guanaco-Random-Test on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-08T18:13:47.081600(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
b5bfa022726f51579e7133cff1ca10f726daf732
# Dataset Card for Evaluation run of jondurbin/airoboros-l2-13b-3.0 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/jondurbin/airoboros-l2-13b-3.0 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [jondurbin/airoboros-l2-13b-3.0](https://huggingface.co/jondurbin/airoboros-l2-13b-3.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_jondurbin__airoboros-l2-13b-3.0", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-24T07:06:03.975558](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-l2-13b-3.0/blob/main/results_2023-10-24T07-06-03.975558.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each of them in the results and the "latest" split for each eval): ```python { "all": { "em": 0.20868288590604026, "em_stderr": 0.004161580956848853, "f1": 0.26992973993288605, "f1_stderr": 0.004166447885566019, "acc": 0.4255516933315701, "acc_stderr": 0.009918265858821027 }, "harness|drop|3": { "em": 0.20868288590604026, "em_stderr": 0.004161580956848853, "f1": 0.26992973993288605, "f1_stderr": 0.004166447885566019 }, "harness|gsm8k|5": { "acc": 0.08946171341925702, "acc_stderr": 0.007861583049939738 }, "harness|winogrande|5": { "acc": 0.7616416732438832, "acc_stderr": 0.011974948667702314 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
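The card's snippet loads the per-example details for `harness_winogrande_5`; by the convention visible in the previous record's config list, the aggregated numbers shown under "Latest results" live in a separate `results` config with a `latest` split. A minimal sketch, assuming that convention holds for this dataset as well:

```python
from datasets import load_dataset

# Aggregated metrics, one row per evaluation run; the "latest" split points
# at the most recent run (2023-10-24T07:06:03.975558 in the card above).
results = load_dataset(
    "open-llm-leaderboard/details_jondurbin__airoboros-l2-13b-3.0",
    "results",
    split="latest",
)
print(results[0])
```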
open-llm-leaderboard/details_jondurbin__airoboros-l2-13b-3.0
[ "region:us" ]
2023-10-08T17:14:55+00:00
{"pretty_name": "Evaluation run of jondurbin/airoboros-l2-13b-3.0", "dataset_summary": "Dataset automatically created during the evaluation run of model [jondurbin/airoboros-l2-13b-3.0](https://huggingface.co/jondurbin/airoboros-l2-13b-3.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_jondurbin__airoboros-l2-13b-3.0\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-24T07:06:03.975558](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-l2-13b-3.0/blob/main/results_2023-10-24T07-06-03.975558.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.20868288590604026,\n \"em_stderr\": 0.004161580956848853,\n \"f1\": 0.26992973993288605,\n \"f1_stderr\": 0.004166447885566019,\n \"acc\": 0.4255516933315701,\n \"acc_stderr\": 0.009918265858821027\n },\n \"harness|drop|3\": {\n \"em\": 0.20868288590604026,\n \"em_stderr\": 0.004161580956848853,\n \"f1\": 0.26992973993288605,\n \"f1_stderr\": 0.004166447885566019\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.08946171341925702,\n \"acc_stderr\": 0.007861583049939738\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7616416732438832,\n \"acc_stderr\": 0.011974948667702314\n }\n}\n```", "repo_url": "https://huggingface.co/jondurbin/airoboros-l2-13b-3.0", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|arc:challenge|25_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_24T07_06_03.975558", "path": ["**/details_harness|drop|3_2023-10-24T07-06-03.975558.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-24T07-06-03.975558.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_24T07_06_03.975558", "path": ["**/details_harness|gsm8k|5_2023-10-24T07-06-03.975558.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-24T07-06-03.975558.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hellaswag|10_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T18-14-31.712178.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T18-14-31.712178.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T18-14-31.712178.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T18-14-31.712178.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T18-14-31.712178.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_24T07_06_03.975558", "path": ["**/details_harness|winogrande|5_2023-10-24T07-06-03.975558.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-24T07-06-03.975558.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_08T18_14_31.712178", "path": ["results_2023-10-08T18-14-31.712178.parquet"]}, {"split": "2023_10_24T07_06_03.975558", "path": ["results_2023-10-24T07-06-03.975558.parquet"]}, {"split": "latest", "path": ["results_2023-10-24T07-06-03.975558.parquet"]}]}]}
2023-10-24T06:06:16+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of jondurbin/airoboros-l2-13b-3.0 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model jondurbin/airoboros-l2-13b-3.0 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-24T07:06:03.975558 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and in the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
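The loading snippet was stripped when this card was flattened; a minimal sketch of what it would look like follows. The repo id is inferred from the leaderboard's details_<org>__<model> naming convention visible in the metadata of other records in this dump, so treat it as an assumption rather than a verified path; the configuration name is taken from this record's metadata.

```python
from datasets import load_dataset

# Repo id assumed from the open-llm-leaderboard naming convention
# (details_<org>__<model>); not verified against the Hub.
data = load_dataset(
    "open-llm-leaderboard/details_jondurbin__airoboros-l2-13b-3.0",
    "harness_winogrande_5",  # one of the 64 per-task configurations
    split="latest",          # "latest" always points to the most recent run
)
```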
[ "# Dataset Card for Evaluation run of jondurbin/airoboros-l2-13b-3.0", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model jondurbin/airoboros-l2-13b-3.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-24T07:06:03.975558(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of jondurbin/airoboros-l2-13b-3.0", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model jondurbin/airoboros-l2-13b-3.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-24T07:06:03.975558(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 23, 31, 171, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of jondurbin/airoboros-l2-13b-3.0## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model jondurbin/airoboros-l2-13b-3.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-24T07:06:03.975558(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
c8594ae233c9a9bf5a1051873a85861de54cd3e3
# Dataset Card for "rte_few_shot_arc" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
SumanthRH/rte_few_shot_arc
[ "region:us" ]
2023-10-08T17:24:42+00:00
{"dataset_info": {"features": [{"name": "problem", "dtype": "string"}, {"name": "solution", "dtype": "string"}, {"name": "response", "dtype": "string"}, {"name": "message", "list": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}], "splits": [{"name": "train", "num_bytes": 437310, "num_examples": 200}], "download_size": 125236, "dataset_size": 437310}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-08T17:26:17+00:00
[]
[]
TAGS #region-us
# Dataset Card for "rte_few_shot_arc" More Information needed
[ "# Dataset Card for \"rte_few_shot_arc\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"rte_few_shot_arc\"\n\nMore Information needed" ]
[ 6, 19 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"rte_few_shot_arc\"\n\nMore Information needed" ]
12ba29ec7e95bb07051302e54d85d28ba53bbc61
# Dataset Card for Evaluation run of JosephusCheung/Pwen-14B-Chat-20_30

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/JosephusCheung/Pwen-14B-Chat-20_30
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [JosephusCheung/Pwen-14B-Chat-20_30](https://huggingface.co/JosephusCheung/Pwen-14B-Chat-20_30) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_JosephusCheung__Pwen-14B-Chat-20_30",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-27T13:38:56.103845](https://huggingface.co/datasets/open-llm-leaderboard/details_JosephusCheung__Pwen-14B-Chat-20_30/blob/main/results_2023-10-27T13-38-56.103845.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and in the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.2828229865771812,
        "em_stderr": 0.004612221798127954,
        "f1": 0.3398972315436241,
        "f1_stderr": 0.004521141568402689,
        "acc": 0.5173500888298219,
        "acc_stderr": 0.012073725510059884
    },
    "harness|drop|3": {
        "em": 0.2828229865771812,
        "em_stderr": 0.004612221798127954,
        "f1": 0.3398972315436241,
        "f1_stderr": 0.004521141568402689
    },
    "harness|gsm8k|5": {
        "acc": 0.2699014404852161,
        "acc_stderr": 0.012227442856468897
    },
    "harness|winogrande|5": {
        "acc": 0.7647987371744278,
        "acc_stderr": 0.011920008163650872
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
open-llm-leaderboard/details_JosephusCheung__Pwen-14B-Chat-20_30
[ "region:us" ]
2023-10-08T17:25:46+00:00
{"pretty_name": "Evaluation run of JosephusCheung/Pwen-14B-Chat-20_30", "dataset_summary": "Dataset automatically created during the evaluation run of model [JosephusCheung/Pwen-14B-Chat-20_30](https://huggingface.co/JosephusCheung/Pwen-14B-Chat-20_30) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_JosephusCheung__Pwen-14B-Chat-20_30\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-27T13:38:56.103845](https://huggingface.co/datasets/open-llm-leaderboard/details_JosephusCheung__Pwen-14B-Chat-20_30/blob/main/results_2023-10-27T13-38-56.103845.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.2828229865771812,\n \"em_stderr\": 0.004612221798127954,\n \"f1\": 0.3398972315436241,\n \"f1_stderr\": 0.004521141568402689,\n \"acc\": 0.5173500888298219,\n \"acc_stderr\": 0.012073725510059884\n },\n \"harness|drop|3\": {\n \"em\": 0.2828229865771812,\n \"em_stderr\": 0.004612221798127954,\n \"f1\": 0.3398972315436241,\n \"f1_stderr\": 0.004521141568402689\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.2699014404852161,\n \"acc_stderr\": 0.012227442856468897\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7647987371744278,\n \"acc_stderr\": 0.011920008163650872\n }\n}\n```", "repo_url": "https://huggingface.co/JosephusCheung/Pwen-14B-Chat-20_30", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|arc:challenge|25_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_27T13_38_56.103845", "path": ["**/details_harness|drop|3_2023-10-27T13-38-56.103845.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-27T13-38-56.103845.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_27T13_38_56.103845", "path": ["**/details_harness|gsm8k|5_2023-10-27T13-38-56.103845.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-27T13-38-56.103845.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hellaswag|10_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T18-25-24.586385.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T18-25-24.586385.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T18-25-24.586385.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T18-25-24.586385.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T18-25-24.586385.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_27T13_38_56.103845", "path": ["**/details_harness|winogrande|5_2023-10-27T13-38-56.103845.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-27T13-38-56.103845.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_08T18_25_24.586385", "path": ["results_2023-10-08T18-25-24.586385.parquet"]}, {"split": "2023_10_27T13_38_56.103845", "path": ["results_2023-10-27T13-38-56.103845.parquet"]}, {"split": "latest", "path": ["results_2023-10-27T13-38-56.103845.parquet"]}]}]}
2023-10-27T12:39:08+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of JosephusCheung/Pwen-14B-Chat-20_30 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model JosephusCheung/Pwen-14B-Chat-20_30 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-27T13:38:56.103845 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each one in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
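A minimal sketch of the load call that the flattened text above refers to, assuming the usual `details_<org>__<model>` repo naming used by the other cards in this dump (the repo id below is inferred from that pattern, not quoted from this row; the config name comes from this row's metadata):

```python
from datasets import load_dataset

# Repo id inferred from the open-llm-leaderboard "details_<org>__<model>"
# naming convention; "harness_winogrande_5" is a config listed in this
# row's metadata.
data = load_dataset("open-llm-leaderboard/details_JosephusCheung__Pwen-14B-Chat-20_30",
                    "harness_winogrande_5",
                    split="train")
```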
[ "# Dataset Card for Evaluation run of JosephusCheung/Pwen-14B-Chat-20_30", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model JosephusCheung/Pwen-14B-Chat-20_30 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-27T13:38:56.103845(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of JosephusCheung/Pwen-14B-Chat-20_30", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model JosephusCheung/Pwen-14B-Chat-20_30 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-27T13:38:56.103845(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 23, 31, 171, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of JosephusCheung/Pwen-14B-Chat-20_30## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model JosephusCheung/Pwen-14B-Chat-20_30 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-27T13:38:56.103845(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
0468024ed304339ab728a4836a711cf22a331844
# Dataset Card for Evaluation run of Writer/palmyra-20b-chat

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/Writer/palmyra-20b-chat
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [Writer/palmyra-20b-chat](https://huggingface.co/Writer/palmyra-20b-chat) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_Writer__palmyra-20b-chat",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-24T17:34:48.335583](https://huggingface.co/datasets/open-llm-leaderboard/details_Writer__palmyra-20b-chat/blob/main/results_2023-10-24T17-34-48.335583.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each one in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.01373741610738255,
        "em_stderr": 0.0011920334890960986,
        "f1": 0.07696308724832225,
        "f1_stderr": 0.0018555585236602612,
        "acc": 0.3519928816466039,
        "acc_stderr": 0.009314927967596935
    },
    "harness|drop|3": {
        "em": 0.01373741610738255,
        "em_stderr": 0.0011920334890960986,
        "f1": 0.07696308724832225,
        "f1_stderr": 0.0018555585236602612
    },
    "harness|gsm8k|5": {
        "acc": 0.039423805913570885,
        "acc_stderr": 0.005360280030342453
    },
    "harness|winogrande|5": {
        "acc": 0.664561957379637,
        "acc_stderr": 0.013269575904851418
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
open-llm-leaderboard/details_Writer__palmyra-20b-chat
[ "region:us" ]
2023-10-08T17:46:21+00:00
{"pretty_name": "Evaluation run of Writer/palmyra-20b-chat", "dataset_summary": "Dataset automatically created during the evaluation run of model [Writer/palmyra-20b-chat](https://huggingface.co/Writer/palmyra-20b-chat) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Writer__palmyra-20b-chat\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-24T17:34:48.335583](https://huggingface.co/datasets/open-llm-leaderboard/details_Writer__palmyra-20b-chat/blob/main/results_2023-10-24T17-34-48.335583.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.01373741610738255,\n \"em_stderr\": 0.0011920334890960986,\n \"f1\": 0.07696308724832225,\n \"f1_stderr\": 0.0018555585236602612,\n \"acc\": 0.3519928816466039,\n \"acc_stderr\": 0.009314927967596935\n },\n \"harness|drop|3\": {\n \"em\": 0.01373741610738255,\n \"em_stderr\": 0.0011920334890960986,\n \"f1\": 0.07696308724832225,\n \"f1_stderr\": 0.0018555585236602612\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.039423805913570885,\n \"acc_stderr\": 0.005360280030342453\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.664561957379637,\n \"acc_stderr\": 0.013269575904851418\n }\n}\n```", "repo_url": "https://huggingface.co/Writer/palmyra-20b-chat", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|arc:challenge|25_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_24T17_34_48.335583", "path": ["**/details_harness|drop|3_2023-10-24T17-34-48.335583.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-24T17-34-48.335583.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_24T17_34_48.335583", "path": ["**/details_harness|gsm8k|5_2023-10-24T17-34-48.335583.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-24T17-34-48.335583.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hellaswag|10_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": 
"2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T18-46-04.606475.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T18-46-04.606475.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T18-46-04.606475.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T18-46-04.606475.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T18-46-04.606475.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_24T17_34_48.335583", "path": ["**/details_harness|winogrande|5_2023-10-24T17-34-48.335583.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-24T17-34-48.335583.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_08T18_46_04.606475", "path": ["results_2023-10-08T18-46-04.606475.parquet"]}, {"split": "2023_10_24T17_34_48.335583", "path": ["results_2023-10-24T17-34-48.335583.parquet"]}, {"split": "latest", "path": ["results_2023-10-24T17-34-48.335583.parquet"]}]}]}
2023-10-24T16:35:00+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Writer/palmyra-20b-chat ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model Writer/palmyra-20b-chat on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-24T17:34:48.335583 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each one in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
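The load call referenced above is preserved in the full card for this run earlier in this row; restored here (with a descriptive comment added), it reads:

```python
from datasets import load_dataset

# Loads the winogrande 5-shot details split of this evaluation run,
# exactly as shown in the full card for this row.
data = load_dataset("open-llm-leaderboard/details_Writer__palmyra-20b-chat",
                    "harness_winogrande_5",
                    split="train")
```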
[ "# Dataset Card for Evaluation run of Writer/palmyra-20b-chat", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Writer/palmyra-20b-chat on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-24T17:34:48.335583(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Writer/palmyra-20b-chat", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Writer/palmyra-20b-chat on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-24T17:34:48.335583(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 20, 31, 168, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Writer/palmyra-20b-chat## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model Writer/palmyra-20b-chat on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-24T17:34:48.335583(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
e74d00487251157014e8263000cbcec48eae0349
# Dataset Card for "all_medical" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
atom92/all_medical
[ "region:us" ]
2023-10-08T17:49:08+00:00
{"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 44683704, "num_examples": 52105}], "download_size": 19461357, "dataset_size": 44683704}}
2023-10-08T17:49:30+00:00
[]
[]
TAGS #region-us
# Dataset Card for "all_medical" More Information needed
[ "# Dataset Card for \"all_medical\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"all_medical\"\n\nMore Information needed" ]
[ 6, 14 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"all_medical\"\n\nMore Information needed" ]
b8b56e56e87e73c49662a1a70387ef37cb7136e7
# Dataset Card for Evaluation run of harborwater/wizard-orca-3b

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/harborwater/wizard-orca-3b
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [harborwater/wizard-orca-3b](https://huggingface.co/harborwater/wizard-orca-3b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_harborwater__wizard-orca-3b",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-24T08:46:00.865464](https://huggingface.co/datasets/open-llm-leaderboard/details_harborwater__wizard-orca-3b/blob/main/results_2023-10-24T08-46-00.865464.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each one in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.0019924496644295304,
        "em_stderr": 0.00045666764626669333,
        "f1": 0.05503670302013434,
        "f1_stderr": 0.0013533156474354355,
        "acc": 0.33995582743378455,
        "acc_stderr": 0.008022574604695198
    },
    "harness|drop|3": {
        "em": 0.0019924496644295304,
        "em_stderr": 0.00045666764626669333,
        "f1": 0.05503670302013434,
        "f1_stderr": 0.0013533156474354355
    },
    "harness|gsm8k|5": {
        "acc": 0.01061410159211524,
        "acc_stderr": 0.002822713322387704
    },
    "harness|winogrande|5": {
        "acc": 0.6692975532754538,
        "acc_stderr": 0.013222435887002691
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
open-llm-leaderboard/details_harborwater__wizard-orca-3b
[ "region:us" ]
2023-10-08T18:21:36+00:00
{"pretty_name": "Evaluation run of harborwater/wizard-orca-3b", "dataset_summary": "Dataset automatically created during the evaluation run of model [harborwater/wizard-orca-3b](https://huggingface.co/harborwater/wizard-orca-3b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_harborwater__wizard-orca-3b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-24T08:46:00.865464](https://huggingface.co/datasets/open-llm-leaderboard/details_harborwater__wizard-orca-3b/blob/main/results_2023-10-24T08-46-00.865464.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0019924496644295304,\n \"em_stderr\": 0.00045666764626669333,\n \"f1\": 0.05503670302013434,\n \"f1_stderr\": 0.0013533156474354355,\n \"acc\": 0.33995582743378455,\n \"acc_stderr\": 0.008022574604695198\n },\n \"harness|drop|3\": {\n \"em\": 0.0019924496644295304,\n \"em_stderr\": 0.00045666764626669333,\n \"f1\": 0.05503670302013434,\n \"f1_stderr\": 0.0013533156474354355\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.01061410159211524,\n \"acc_stderr\": 0.002822713322387704\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6692975532754538,\n \"acc_stderr\": 0.013222435887002691\n }\n}\n```", "repo_url": "https://huggingface.co/harborwater/wizard-orca-3b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|arc:challenge|25_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_24T08_46_00.865464", "path": ["**/details_harness|drop|3_2023-10-24T08-46-00.865464.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-24T08-46-00.865464.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_24T08_46_00.865464", "path": ["**/details_harness|gsm8k|5_2023-10-24T08-46-00.865464.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-24T08-46-00.865464.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hellaswag|10_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", 
"data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T19-21-18.723038.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T19-21-18.723038.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T19-21-18.723038.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T19-21-18.723038.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T19-21-18.723038.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_24T08_46_00.865464", "path": ["**/details_harness|winogrande|5_2023-10-24T08-46-00.865464.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-24T08-46-00.865464.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_08T19_21_18.723038", "path": ["results_2023-10-08T19-21-18.723038.parquet"]}, {"split": "2023_10_24T08_46_00.865464", "path": ["results_2023-10-24T08-46-00.865464.parquet"]}, {"split": "latest", "path": ["results_2023-10-24T08-46-00.865464.parquet"]}]}]}
2023-10-24T07:46:12+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of harborwater/wizard-orca-3b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model harborwater/wizard-orca-3b on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-24T08:46:00.865464 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of harborwater/wizard-orca-3b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model harborwater/wizard-orca-3b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-24T08:46:00.865464(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of harborwater/wizard-orca-3b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model harborwater/wizard-orca-3b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-24T08:46:00.865464(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 20, 31, 168, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of harborwater/wizard-orca-3b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model harborwater/wizard-orca-3b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-24T08:46:00.865464(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
7e128513ff91ed9a25c94896145ee6102ee4cfb2
# Dataset Card for "kbd_speech_preprocessed_for_whisper_training" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
anzorq/kbd_speech_preprocessed_for_whisper_training
[ "region:us" ]
2023-10-08T18:28:17+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "input_features", "sequence": {"sequence": "float32"}}, {"name": "labels", "sequence": "int64"}], "splits": [{"name": "train", "num_bytes": 17767327304, "num_examples": 18499}, {"name": "test", "num_bytes": 1974680696, "num_examples": 2056}], "download_size": 1602763861, "dataset_size": 19742008000}}
2023-10-08T18:34:44+00:00
[]
[]
TAGS #region-us
# Dataset Card for "kbd_speech_preprocessed_for_whisper_training" More Information needed
[ "# Dataset Card for \"kbd_speech_preprocessed_for_whisper_training\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"kbd_speech_preprocessed_for_whisper_training\"\n\nMore Information needed" ]
[ 6, 27 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"kbd_speech_preprocessed_for_whisper_training\"\n\nMore Information needed" ]
6d12c8aa0ca99858bf72a07061f492c19b08f6ba
# Dataset Card for "github-issues" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
MattStammers/github-issues
[ "region:us" ]
2023-10-08T18:29:24+00:00
{"dataset_info": {"features": [{"name": "url", "dtype": "string"}, {"name": "repository_url", "dtype": "string"}, {"name": "labels_url", "dtype": "string"}, {"name": "comments_url", "dtype": "string"}, {"name": "events_url", "dtype": "string"}, {"name": "html_url", "dtype": "string"}, {"name": "id", "dtype": "int64"}, {"name": "node_id", "dtype": "string"}, {"name": "number", "dtype": "int64"}, {"name": "title", "dtype": "string"}, {"name": "user", "struct": [{"name": "login", "dtype": "string"}, {"name": "id", "dtype": "int64"}, {"name": "node_id", "dtype": "string"}, {"name": "avatar_url", "dtype": "string"}, {"name": "gravatar_id", "dtype": "string"}, {"name": "url", "dtype": "string"}, {"name": "html_url", "dtype": "string"}, {"name": "followers_url", "dtype": "string"}, {"name": "following_url", "dtype": "string"}, {"name": "gists_url", "dtype": "string"}, {"name": "starred_url", "dtype": "string"}, {"name": "subscriptions_url", "dtype": "string"}, {"name": "organizations_url", "dtype": "string"}, {"name": "repos_url", "dtype": "string"}, {"name": "events_url", "dtype": "string"}, {"name": "received_events_url", "dtype": "string"}, {"name": "type", "dtype": "string"}, {"name": "site_admin", "dtype": "bool"}]}, {"name": "labels", "list": [{"name": "id", "dtype": "int64"}, {"name": "node_id", "dtype": "string"}, {"name": "url", "dtype": "string"}, {"name": "name", "dtype": "string"}, {"name": "color", "dtype": "string"}, {"name": "default", "dtype": "bool"}, {"name": "description", "dtype": "string"}]}, {"name": "state", "dtype": "string"}, {"name": "locked", "dtype": "bool"}, {"name": "assignee", "struct": [{"name": "login", "dtype": "string"}, {"name": "id", "dtype": "int64"}, {"name": "node_id", "dtype": "string"}, {"name": "avatar_url", "dtype": "string"}, {"name": "gravatar_id", "dtype": "string"}, {"name": "url", "dtype": "string"}, {"name": "html_url", "dtype": "string"}, {"name": "followers_url", "dtype": "string"}, {"name": "following_url", "dtype": "string"}, {"name": "gists_url", "dtype": "string"}, {"name": "starred_url", "dtype": "string"}, {"name": "subscriptions_url", "dtype": "string"}, {"name": "organizations_url", "dtype": "string"}, {"name": "repos_url", "dtype": "string"}, {"name": "events_url", "dtype": "string"}, {"name": "received_events_url", "dtype": "string"}, {"name": "type", "dtype": "string"}, {"name": "site_admin", "dtype": "bool"}]}, {"name": "assignees", "list": [{"name": "login", "dtype": "string"}, {"name": "id", "dtype": "int64"}, {"name": "node_id", "dtype": "string"}, {"name": "avatar_url", "dtype": "string"}, {"name": "gravatar_id", "dtype": "string"}, {"name": "url", "dtype": "string"}, {"name": "html_url", "dtype": "string"}, {"name": "followers_url", "dtype": "string"}, {"name": "following_url", "dtype": "string"}, {"name": "gists_url", "dtype": "string"}, {"name": "starred_url", "dtype": "string"}, {"name": "subscriptions_url", "dtype": "string"}, {"name": "organizations_url", "dtype": "string"}, {"name": "repos_url", "dtype": "string"}, {"name": "events_url", "dtype": "string"}, {"name": "received_events_url", "dtype": "string"}, {"name": "type", "dtype": "string"}, {"name": "site_admin", "dtype": "bool"}]}, {"name": "milestone", "struct": [{"name": "url", "dtype": "string"}, {"name": "html_url", "dtype": "string"}, {"name": "labels_url", "dtype": "string"}, {"name": "id", "dtype": "int64"}, {"name": "node_id", "dtype": "string"}, {"name": "number", "dtype": "int64"}, {"name": "title", "dtype": "string"}, {"name": "description", "dtype": 
"string"}, {"name": "creator", "struct": [{"name": "login", "dtype": "string"}, {"name": "id", "dtype": "int64"}, {"name": "node_id", "dtype": "string"}, {"name": "avatar_url", "dtype": "string"}, {"name": "gravatar_id", "dtype": "string"}, {"name": "url", "dtype": "string"}, {"name": "html_url", "dtype": "string"}, {"name": "followers_url", "dtype": "string"}, {"name": "following_url", "dtype": "string"}, {"name": "gists_url", "dtype": "string"}, {"name": "starred_url", "dtype": "string"}, {"name": "subscriptions_url", "dtype": "string"}, {"name": "organizations_url", "dtype": "string"}, {"name": "repos_url", "dtype": "string"}, {"name": "events_url", "dtype": "string"}, {"name": "received_events_url", "dtype": "string"}, {"name": "type", "dtype": "string"}, {"name": "site_admin", "dtype": "bool"}]}, {"name": "open_issues", "dtype": "int64"}, {"name": "closed_issues", "dtype": "int64"}, {"name": "state", "dtype": "string"}, {"name": "created_at", "dtype": "int64"}, {"name": "updated_at", "dtype": "int64"}, {"name": "due_on", "dtype": "int64"}, {"name": "closed_at", "dtype": "int64"}]}, {"name": "comments", "sequence": "string"}, {"name": "created_at", "dtype": "int64"}, {"name": "updated_at", "dtype": "int64"}, {"name": "closed_at", "dtype": "int64"}, {"name": "author_association", "dtype": "string"}, {"name": "active_lock_reason", "dtype": "null"}, {"name": "pull_request", "struct": [{"name": "url", "dtype": "string"}, {"name": "html_url", "dtype": "string"}, {"name": "diff_url", "dtype": "string"}, {"name": "patch_url", "dtype": "string"}]}, {"name": "body", "dtype": "string"}, {"name": "timeline_url", "dtype": "string"}, {"name": "performed_via_github_app", "dtype": "null"}, {"name": "is_pull_request", "dtype": "bool"}], "splits": [{"name": "train", "num_bytes": 10438387, "num_examples": 3019}], "download_size": 3102817, "dataset_size": 10438387}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-08T18:29:37+00:00
[]
[]
TAGS #region-us
# Dataset Card for "github-issues" More Information needed
[ "# Dataset Card for \"github-issues\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"github-issues\"\n\nMore Information needed" ]
[ 6, 15 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"github-issues\"\n\nMore Information needed" ]
f40c3b8f7b88759afc9190206914105df5b27503
# Dataset Card for "hieu-edu-date" Left: 5626 rows - 0.18 % [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
H4438/hieu-edu-date
[ "region:us" ]
2023-10-08T18:35:06+00:00
{"dataset_info": {"features": [{"name": "title", "dtype": "string"}, {"name": "body", "dtype": "string"}, {"name": "url", "dtype": "string"}, {"name": "dates", "sequence": "string"}, {"name": "est_date", "dtype": "string"}, {"name": "ext_dates", "sequence": "string"}, {"name": "flt_dates", "sequence": "string"}], "splits": [{"name": "train", "num_bytes": 413651688, "num_examples": 30758}], "download_size": 0, "dataset_size": 413651688}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-08T18:48:51+00:00
[]
[]
TAGS #region-us
# Dataset Card for "hieu-edu-date" Left: 5626 rows - 0.18 % More Information needed
[ "# Dataset Card for \"hieu-edu-date\"\nLeft: 5626 rows - 0.18 %\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"hieu-edu-date\"\nLeft: 5626 rows - 0.18 %\n\nMore Information needed" ]
[ 6, 29 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"hieu-edu-date\"\nLeft: 5626 rows - 0.18 %\n\nMore Information needed" ]
3beb9bdc11d371e48f8c0940948df64ce232dce5
# Dataset Card for Evaluation run of chargoddard/duplicitous-slurpbeast-13b

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/chargoddard/duplicitous-slurpbeast-13b
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [chargoddard/duplicitous-slurpbeast-13b](https://huggingface.co/chargoddard/duplicitous-slurpbeast-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_chargoddard__duplicitous-slurpbeast-13b",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-26T12:10:38.195509](https://huggingface.co/datasets/open-llm-leaderboard/details_chargoddard__duplicitous-slurpbeast-13b/blob/main/results_2023-10-26T12-10-38.195509.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.022651006711409395,
        "em_stderr": 0.0015237307803438113,
        "f1": 0.10978607382550301,
        "f1_stderr": 0.0022271926416287282,
        "acc": 0.41926868133939454,
        "acc_stderr": 0.009980675697209198
    },
    "harness|drop|3": {
        "em": 0.022651006711409395,
        "em_stderr": 0.0015237307803438113,
        "f1": 0.10978607382550301,
        "f1_stderr": 0.0022271926416287282
    },
    "harness|gsm8k|5": {
        "acc": 0.08794541319181198,
        "acc_stderr": 0.007801162197487707
    },
    "harness|winogrande|5": {
        "acc": 0.7505919494869772,
        "acc_stderr": 0.012160189196930689
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
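Besides "latest", each harness configuration in this repo keeps timestamped splits, so a particular run can be pinned explicitly. A sketch; the split name below is copied verbatim from this repo's config list:

```python
from datasets import load_dataset

# Pin the DROP details to one specific run instead of "latest".
drop_run = load_dataset(
    "open-llm-leaderboard/details_chargoddard__duplicitous-slurpbeast-13b",
    "harness_drop_3",
    split="2023_10_26T12_10_38.195509",
)
print(drop_run)
```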
open-llm-leaderboard/details_chargoddard__duplicitous-slurpbeast-13b
[ "region:us" ]
2023-10-08T18:36:13+00:00
{"pretty_name": "Evaluation run of chargoddard/duplicitous-slurpbeast-13b", "dataset_summary": "Dataset automatically created during the evaluation run of model [chargoddard/duplicitous-slurpbeast-13b](https://huggingface.co/chargoddard/duplicitous-slurpbeast-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_chargoddard__duplicitous-slurpbeast-13b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-26T12:10:38.195509](https://huggingface.co/datasets/open-llm-leaderboard/details_chargoddard__duplicitous-slurpbeast-13b/blob/main/results_2023-10-26T12-10-38.195509.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.022651006711409395,\n \"em_stderr\": 0.0015237307803438113,\n \"f1\": 0.10978607382550301,\n \"f1_stderr\": 0.0022271926416287282,\n \"acc\": 0.41926868133939454,\n \"acc_stderr\": 0.009980675697209198\n },\n \"harness|drop|3\": {\n \"em\": 0.022651006711409395,\n \"em_stderr\": 0.0015237307803438113,\n \"f1\": 0.10978607382550301,\n \"f1_stderr\": 0.0022271926416287282\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.08794541319181198,\n \"acc_stderr\": 0.007801162197487707\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7505919494869772,\n \"acc_stderr\": 0.012160189196930689\n }\n}\n```", "repo_url": "https://huggingface.co/chargoddard/duplicitous-slurpbeast-13b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|arc:challenge|25_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_26T12_10_38.195509", "path": ["**/details_harness|drop|3_2023-10-26T12-10-38.195509.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-26T12-10-38.195509.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_26T12_10_38.195509", "path": ["**/details_harness|gsm8k|5_2023-10-26T12-10-38.195509.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-26T12-10-38.195509.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hellaswag|10_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hellaswag|10_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T19-35-50.428127.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T19-35-50.428127.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T19-35-50.428127.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T19-35-50.428127.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": 
"2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T19-35-50.428127.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T19-35-50.428127.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_26T12_10_38.195509", "path": ["**/details_harness|winogrande|5_2023-10-26T12-10-38.195509.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-26T12-10-38.195509.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_08T19_35_50.428127", "path": ["results_2023-10-08T19-35-50.428127.parquet"]}, {"split": "2023_10_26T12_10_38.195509", "path": ["results_2023-10-26T12-10-38.195509.parquet"]}, {"split": "latest", "path": ["results_2023-10-26T12-10-38.195509.parquet"]}]}]}
2023-10-26T11:10:51+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of chargoddard/duplicitous-slurpbeast-13b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model chargoddard/duplicitous-slurpbeast-13b on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-26T12:10:38.195509 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
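A minimal sketch of the load call referenced above; the repository id and the `harness_winogrande_5` configuration name are both taken from this record's metadata:

```python
from datasets import load_dataset

# Per-sample details for one evaluated task; per the card, the "train"
# split of each configuration always points at the most recent run.
data = load_dataset(
    "open-llm-leaderboard/details_chargoddard__duplicitous-slurpbeast-13b",
    "harness_winogrande_5",
    split="train",
)
```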
[ "# Dataset Card for Evaluation run of chargoddard/duplicitous-slurpbeast-13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model chargoddard/duplicitous-slurpbeast-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-26T12:10:38.195509(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of chargoddard/duplicitous-slurpbeast-13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model chargoddard/duplicitous-slurpbeast-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-26T12:10:38.195509(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 25, 31, 173, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of chargoddard/duplicitous-slurpbeast-13b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model chargoddard/duplicitous-slurpbeast-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-26T12:10:38.195509(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
59741408a0be90669553f933fc96b325dd239936
# Dataset Card for Evaluation run of chargoddard/duplicitous-mammal-13b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/chargoddard/duplicitous-mammal-13b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [chargoddard/duplicitous-mammal-13b](https://huggingface.co/chargoddard/duplicitous-mammal-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_chargoddard__duplicitous-mammal-13b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-28T12:16:35.261597](https://huggingface.co/datasets/open-llm-leaderboard/details_chargoddard__duplicitous-mammal-13b/blob/main/results_2023-10-28T12-16-35.261597.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.011115771812080536, "em_stderr": 0.0010736981082190872, "f1": 0.09195050335570452, "f1_stderr": 0.0019265640812138418, "acc": 0.42078498156683963, "acc_stderr": 0.01004075602047218 }, "harness|drop|3": { "em": 0.011115771812080536, "em_stderr": 0.0010736981082190872, "f1": 0.09195050335570452, "f1_stderr": 0.0019265640812138418 }, "harness|gsm8k|5": { "acc": 0.09097801364670205, "acc_stderr": 0.00792132284401367 }, "harness|winogrande|5": { "acc": 0.7505919494869772, "acc_stderr": 0.012160189196930689 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
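The card above only shows how to load one task's per-sample details; here is a short sketch of pulling the aggregated metrics through the "results" configuration it describes. The config name and its "latest" split come from this card's file listing, while the exact column layout of the results parquet is an assumption, so the sketch inspects the first row rather than indexing named fields:

```python
from datasets import load_dataset

# "results" aggregates every run; its "latest" split resolves to the most
# recent results file (here results_2023-10-28T12-16-35.261597.parquet).
results = load_dataset(
    "open-llm-leaderboard/details_chargoddard__duplicitous-mammal-13b",
    "results",
    split="latest",
)

# Assumption: the row mirrors the aggregated JSON shown above; print it to
# see the actual schema before relying on specific field names.
print(results[0])
```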
open-llm-leaderboard/details_chargoddard__duplicitous-mammal-13b
[ "region:us" ]
2023-10-08T18:36:39+00:00
{"pretty_name": "Evaluation run of chargoddard/duplicitous-mammal-13b", "dataset_summary": "Dataset automatically created during the evaluation run of model [chargoddard/duplicitous-mammal-13b](https://huggingface.co/chargoddard/duplicitous-mammal-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_chargoddard__duplicitous-mammal-13b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-28T12:16:35.261597](https://huggingface.co/datasets/open-llm-leaderboard/details_chargoddard__duplicitous-mammal-13b/blob/main/results_2023-10-28T12-16-35.261597.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.011115771812080536,\n \"em_stderr\": 0.0010736981082190872,\n \"f1\": 0.09195050335570452,\n \"f1_stderr\": 0.0019265640812138418,\n \"acc\": 0.42078498156683963,\n \"acc_stderr\": 0.01004075602047218\n },\n \"harness|drop|3\": {\n \"em\": 0.011115771812080536,\n \"em_stderr\": 0.0010736981082190872,\n \"f1\": 0.09195050335570452,\n \"f1_stderr\": 0.0019265640812138418\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.09097801364670205,\n \"acc_stderr\": 0.00792132284401367\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7505919494869772,\n \"acc_stderr\": 0.012160189196930689\n }\n}\n```", "repo_url": "https://huggingface.co/chargoddard/duplicitous-mammal-13b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|arc:challenge|25_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_28T12_16_35.261597", "path": ["**/details_harness|drop|3_2023-10-28T12-16-35.261597.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-28T12-16-35.261597.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_28T12_16_35.261597", "path": ["**/details_harness|gsm8k|5_2023-10-28T12-16-35.261597.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-28T12-16-35.261597.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hellaswag|10_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-08T19-36-16.264447.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T19-36-16.264447.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T19-36-16.264447.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T19-36-16.264447.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T19-36-16.264447.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T19-36-16.264447.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_28T12_16_35.261597", "path": ["**/details_harness|winogrande|5_2023-10-28T12-16-35.261597.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-28T12-16-35.261597.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_08T19_36_16.264447", "path": ["results_2023-10-08T19-36-16.264447.parquet"]}, {"split": "2023_10_28T12_16_35.261597", "path": ["results_2023-10-28T12-16-35.261597.parquet"]}, {"split": "latest", "path": ["results_2023-10-28T12-16-35.261597.parquet"]}]}]}
2023-10-28T11:16:47+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of chargoddard/duplicitous-mammal-13b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model chargoddard/duplicitous-mammal-13b on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the sketch after this card): ## Latest results These are the latest results from run 2023-10-28T12:16:35.261597 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
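The card above announces a loading snippet, but the code block was stripped from this text-only rendering. A minimal sketch, assuming the repository follows the leaderboard's `details_<org>__<model>` naming pattern (the repo id is not spelled out in this rendering) and using the `harness_winogrande_5` config that this record's metadata does list:

```python
from datasets import load_dataset

# Repo id inferred from the leaderboard's naming convention; this is an
# assumption, not quoted from this record. The config name and "train"
# split come from the metadata above.
data = load_dataset(
    "open-llm-leaderboard/details_chargoddard__duplicitous-mammal-13b",
    "harness_winogrande_5",
    split="train",
)
print(data)
```

Swapping in the "results" config with `split="latest"` would instead return the aggregated metrics the card describes.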
[ "# Dataset Card for Evaluation run of chargoddard/duplicitous-mammal-13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model chargoddard/duplicitous-mammal-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-28T12:16:35.261597(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of chargoddard/duplicitous-mammal-13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model chargoddard/duplicitous-mammal-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-28T12:16:35.261597(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 23, 31, 171, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of chargoddard/duplicitous-mammal-13b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model chargoddard/duplicitous-mammal-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-28T12:16:35.261597(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
6e79eba0e329a67cd05bc95d6bd95ad85d9b47ef
# Dataset Card for Evaluation run of caisarl76/mistral-guanaco1k-ep2

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/caisarl76/mistral-guanaco1k-ep2
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [caisarl76/mistral-guanaco1k-ep2](https://huggingface.co/caisarl76/mistral-guanaco1k-ep2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_caisarl76__mistral-guanaco1k-ep2",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-23T09:23:27.252152](https://huggingface.co/datasets/open-llm-leaderboard/details_caisarl76__mistral-guanaco1k-ep2/blob/main/results_2023-10-23T09-23-27.252152.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.002307046979865772,
        "em_stderr": 0.0004913221265094507,
        "f1": 0.06542994966442944,
        "f1_stderr": 0.001488633695023099,
        "acc": 0.4501858873976542,
        "acc_stderr": 0.010287740882080417
    },
    "harness|drop|3": {
        "em": 0.002307046979865772,
        "em_stderr": 0.0004913221265094507,
        "f1": 0.06542994966442944,
        "f1_stderr": 0.001488633695023099
    },
    "harness|gsm8k|5": {
        "acc": 0.1197877179681577,
        "acc_stderr": 0.008944213403553055
    },
    "harness|winogrande|5": {
        "acc": 0.7805840568271507,
        "acc_stderr": 0.01163126836060778
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
open-llm-leaderboard/details_caisarl76__mistral-guanaco1k-ep2
[ "region:us" ]
2023-10-08T18:44:10+00:00
{"pretty_name": "Evaluation run of caisarl76/mistral-guanaco1k-ep2", "dataset_summary": "Dataset automatically created during the evaluation run of model [caisarl76/mistral-guanaco1k-ep2](https://huggingface.co/caisarl76/mistral-guanaco1k-ep2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_caisarl76__mistral-guanaco1k-ep2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-23T09:23:27.252152](https://huggingface.co/datasets/open-llm-leaderboard/details_caisarl76__mistral-guanaco1k-ep2/blob/main/results_2023-10-23T09-23-27.252152.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.002307046979865772,\n \"em_stderr\": 0.0004913221265094507,\n \"f1\": 0.06542994966442944,\n \"f1_stderr\": 0.001488633695023099,\n \"acc\": 0.4501858873976542,\n \"acc_stderr\": 0.010287740882080417\n },\n \"harness|drop|3\": {\n \"em\": 0.002307046979865772,\n \"em_stderr\": 0.0004913221265094507,\n \"f1\": 0.06542994966442944,\n \"f1_stderr\": 0.001488633695023099\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.1197877179681577,\n \"acc_stderr\": 0.008944213403553055\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7805840568271507,\n \"acc_stderr\": 0.01163126836060778\n }\n}\n```", "repo_url": "https://huggingface.co/caisarl76/mistral-guanaco1k-ep2", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|arc:challenge|25_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_23T09_23_27.252152", "path": ["**/details_harness|drop|3_2023-10-23T09-23-27.252152.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-23T09-23-27.252152.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_23T09_23_27.252152", "path": ["**/details_harness|gsm8k|5_2023-10-23T09-23-27.252152.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-23T09-23-27.252152.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hellaswag|10_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T19-43-46.755661.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T19-43-46.755661.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T19-43-46.755661.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T19-43-46.755661.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T19-43-46.755661.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_23T09_23_27.252152", "path": ["**/details_harness|winogrande|5_2023-10-23T09-23-27.252152.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-23T09-23-27.252152.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_08T19_43_46.755661", "path": ["results_2023-10-08T19-43-46.755661.parquet"]}, {"split": "2023_10_23T09_23_27.252152", "path": ["results_2023-10-23T09-23-27.252152.parquet"]}, {"split": "latest", "path": ["results_2023-10-23T09-23-27.252152.parquet"]}]}]}
2023-10-23T08:23:39+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of caisarl76/mistral-guanaco1k-ep2 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model caisarl76/mistral-guanaco1k-ep2 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-23T09:23:27.252152 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
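The flattened card above keeps the sentence "To load the details from a run, you can for instance do the following:" but drops the snippet it introduced. A minimal sketch, mirroring the load pattern used by the sibling evaluation-run cards in this dump — the repo id open-llm-leaderboard/details_caisarl76__mistral-guanaco1k-ep2 is an assumption inferred from that naming pattern, while the config name harness_winogrande_5 and its "latest" split are declared in this record's metadata:

```python
from datasets import load_dataset

# Repo id follows the open-llm-leaderboard "details_<org>__<model>" naming
# pattern (an assumption, since the id field is not shown here); the config
# "harness_winogrande_5" and its "latest" split come from this record's metadata.
data = load_dataset(
    "open-llm-leaderboard/details_caisarl76__mistral-guanaco1k-ep2",
    "harness_winogrande_5",
    split="latest",
)
```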
[ "# Dataset Card for Evaluation run of caisarl76/mistral-guanaco1k-ep2", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model caisarl76/mistral-guanaco1k-ep2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T09:23:27.252152(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of caisarl76/mistral-guanaco1k-ep2", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model caisarl76/mistral-guanaco1k-ep2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T09:23:27.252152(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 25, 31, 173, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of caisarl76/mistral-guanaco1k-ep2## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model caisarl76/mistral-guanaco1k-ep2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-23T09:23:27.252152(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
03154c7acc6e29b5ef01aaecfa09045f4f702259
# VoxCelebSpoof VoxCelebSpoof is a dataset related to detecting spoofing attacks on automatic speaker verification systems. This dataset is part of a broader effort to improve the security of voice biometric systems against various types of spoofing attacks, such as replay attacks, voice synthesis, and voice conversion. ## Dataset Details ### Dataset Description The VoxCelebSpoof dataset includes a range of audio samples from different types of synthesis spoofs. The goal of the dataset is to support the development of systems that can accurately distinguish between genuine and spoofed audio samples. Key features and objectives of VoxCelebSpoof include: - **Data Diversity:** The dataset is derived from VoxCeleb, a large-scale speaker identification dataset containing celebrity interviews. Due to this, the spoofing detection models trained on VoxCelebSpoof are exposed to various accents, languages, and acoustic environments. - **Synthetic Varieties:** The spoofs include a variety of synthetic (TTS) attacks, such as high-quality synthetic speech generated with AI-based voice cloning, challenging systems to recognise and defend against a range of synthetic vulnerabilities. - **Benchmarking:** VoxCelebSpoof can serve as a benchmark for comparing the performance of different spoofing detection systems under standardised conditions. - **Research and Development:** The dataset encourages the research community to innovate in anti-spoofing for voice biometric systems, promoting advancements in techniques like feature extraction, classification algorithms, and deep learning. - **Curated by:** Matthew Boakes - **Funded by:** Bill & Melinda Gates Foundation - **Shared by:** Alan Turing Institute - **Language(s) (NLP):** English - **License:** MIT ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. 
--> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
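The Uses and Dataset Structure sections above are still stubs; purely as an illustrative sketch of pulling the dataset from the Hub — only the repo id MattyB95/VoxCelebSpoof comes from this record, while the split name and column layout are assumptions the card does not document:

```python
from datasets import load_dataset

# The repo id is taken from this record's "id" field; the "train" split and
# an audio-plus-label column layout are assumptions — the card's Dataset
# Structure section is still a stub.
ds = load_dataset("MattyB95/VoxCelebSpoof", split="train")
print(ds)      # inspect the actual features and row count
print(ds[0])   # one example (hypothetically an audio clip plus a label)
```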
MattyB95/VoxCelebSpoof
[ "task_categories:audio-classification", "task_categories:text-to-speech", "size_categories:100K<n<1M", "language:en", "license:mit", "code", "region:us" ]
2023-10-08T18:47:48+00:00
{"language": ["en"], "license": "mit", "size_categories": ["100K<n<1M"], "task_categories": ["audio-classification", "text-to-speech"], "pretty_name": "VoxCelebSpoof", "tags": ["code"]}
2024-01-31T14:45:04+00:00
[]
[ "en" ]
TAGS #task_categories-audio-classification #task_categories-text-to-speech #size_categories-100K<n<1M #language-English #license-mit #code #region-us
# VoxCelebSpoof VoxCelebSpoof is a dataset related to detecting spoofing attacks on automatic speaker verification systems. This dataset is part of a broader effort to improve the security of voice biometric systems against various types of spoofing attacks, such as replay attacks, voice synthesis, and voice conversion. ## Dataset Details ### Dataset Description The VoxCelebSpoof dataset includes a range of audio samples from different types of synthesis spoofs. The goal of the dataset is to support the development of systems that can accurately distinguish between genuine and spoofed audio samples. Key features and objectives of VoxCelebSpoof include: - Data Diversity: The dataset is derived from VoxCeleb, a large-scale speaker identification dataset containing celebrity interviews. Due to this, the spoofing detection models trained on VoxCelebSpoof are exposed to various accents, languages, and acoustic environments. - Synthetic Varieties: The spoofs include a variety of synthetic (TTS) attacks, such as high-quality synthetic speech generated with AI-based voice cloning, challenging systems to recognise and defend against a range of synthetic vulnerabilities. - Benchmarking: VoxCelebSpoof can serve as a benchmark for comparing the performance of different spoofing detection systems under standardised conditions. - Research and Development: The dataset encourages the research community to innovate in anti-spoofing for voice biometric systems, promoting advancements in techniques like feature extraction, classification algorithms, and deep learning. - Curated by: Matthew Boakes - Funded by: Bill & Melinda Gates Foundation - Shared by: Alan Turing Institute - Language(s) (NLP): English - License: MIT ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# VoxCelebSpoof\n\nVoxCelebSpoof is a dataset related to detecting spoofing attacks on automatic speaker verification systems. This dataset is part of a broader effort to improve the security of voice biometric systems against various types of spoofing attacks, such as replay attacks, voice synthesis, and voice conversion.", "## Dataset Details", "### Dataset Description\n\nThe VoxCelebSpoof dataset includes a range of audio samples from different types of synthesis spoofs. The goal of the dataset is to develop systems that can accurately distinguish between genuine and spoofed audio samples.\n\nKey features and objectives of VoxCelebSpoof include:\n\n- Data Diversity: The dataset is derived from VoxCeleb, a large-scale speaker identification dataset containing celebrity interviews. Due to this, the spoofing detection models trained on VoxCelebSpoof are exposed to various accents, languages, and acoustic environments.\n- Synthetic Varieties: The spoofs include a variety of synthetic (TTS) attacks, such as high-quality synthetic speech, using AI-based voice cloning, and challenging systems to recognise and defend against a range of synthetic vulnerabilities.\n- Benchmarking: VoxCelebSpoof can serve as a benchmark for comparing the performance of different spoofing detection systems under standardised conditions.\n- Research and Development: The dataset encourages the research community to innovate in anti-spoofing for voice biometric systems, promoting advancements in techniques like feature extraction, classification algorithms, and deep learning.\n\n\n- Curated by: Matthew Boakes\n- Funded by: Bill & Melinda Gates Foundation\n- Shared by: Alan Turing Institute\n- Language(s) (NLP): English\n- License: MIT", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#task_categories-audio-classification #task_categories-text-to-speech #size_categories-100K<n<1M #language-English #license-mit #code #region-us \n", "# VoxCelebSpoof\n\nVoxCelebSpoof is a dataset related to detecting spoofing attacks on automatic speaker verification systems. This dataset is part of a broader effort to improve the security of voice biometric systems against various types of spoofing attacks, such as replay attacks, voice synthesis, and voice conversion.", "## Dataset Details", "### Dataset Description\n\nThe VoxCelebSpoof dataset includes a range of audio samples from different types of synthesis spoofs. The goal of the dataset is to develop systems that can accurately distinguish between genuine and spoofed audio samples.\n\nKey features and objectives of VoxCelebSpoof include:\n\n- Data Diversity: The dataset is derived from VoxCeleb, a large-scale speaker identification dataset containing celebrity interviews. Due to this, the spoofing detection models trained on VoxCelebSpoof are exposed to various accents, languages, and acoustic environments.\n- Synthetic Varieties: The spoofs include a variety of synthetic (TTS) attacks, such as high-quality synthetic speech, using AI-based voice cloning, and challenging systems to recognise and defend against a range of synthetic vulnerabilities.\n- Benchmarking: VoxCelebSpoof can serve as a benchmark for comparing the performance of different spoofing detection systems under standardised conditions.\n- Research and Development: The dataset encourages the research community to innovate in anti-spoofing for voice biometric systems, promoting advancements in techniques like feature extraction, classification algorithms, and deep learning.\n\n\n- Curated by: Matthew Boakes\n- Funded by: Bill & Melinda Gates Foundation\n- Shared by: Alan Turing Institute\n- Language(s) (NLP): English\n- License: MIT", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 54, 80, 4, 337, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#task_categories-audio-classification #task_categories-text-to-speech #size_categories-100K<n<1M #language-English #license-mit #code #region-us \n# VoxCelebSpoof\n\nVoxCelebSpoof is a dataset related to detecting spoofing attacks on automatic speaker verification systems. This dataset is part of a broader effort to improve the security of voice biometric systems against various types of spoofing attacks, such as replay attacks, voice synthesis, and voice conversion.## Dataset Details### Dataset Description\n\nThe VoxCelebSpoof dataset includes a range of audio samples from different types of synthesis spoofs. The goal of the dataset is to develop systems that can accurately distinguish between genuine and spoofed audio samples.\n\nKey features and objectives of VoxCelebSpoof include:\n\n- Data Diversity: The dataset is derived from VoxCeleb, a large-scale speaker identification dataset containing celebrity interviews. Due to this, the spoofing detection models trained on VoxCelebSpoof are exposed to various accents, languages, and acoustic environments.\n- Synthetic Varieties: The spoofs include a variety of synthetic (TTS) attacks, such as high-quality synthetic speech, using AI-based voice cloning, and challenging systems to recognise and defend against a range of synthetic vulnerabilities.\n- Benchmarking: VoxCelebSpoof can serve as a benchmark for comparing the performance of different spoofing detection systems under standardised conditions.\n- Research and Development: The dataset encourages the research community to innovate in anti-spoofing for voice biometric systems, promoting advancements in techniques like feature extraction, classification algorithms, and deep learning.\n\n\n- Curated by: Matthew Boakes\n- Funded by: Bill & Melinda Gates Foundation\n- Shared by: Alan Turing Institute\n- Language(s) (NLP): English\n- License: MIT### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses" ]
53b302e00ee6144ef181f5930dc4a86c354e83c7
# Dataset Card for Evaluation run of beaugogh/Llama2-7b-openorca-mc-v2-dpo ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/beaugogh/Llama2-7b-openorca-mc-v2-dpo - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [beaugogh/Llama2-7b-openorca-mc-v2-dpo](https://huggingface.co/beaugogh/Llama2-7b-openorca-mc-v2-dpo) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_beaugogh__Llama2-7b-openorca-mc-v2-dpo", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-25T22:40:34.930470](https://huggingface.co/datasets/open-llm-leaderboard/details_beaugogh__Llama2-7b-openorca-mc-v2-dpo/blob/main/results_2023-10-25T22-40-34.930470.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0012583892617449664, "em_stderr": 0.0003630560893119234, "f1": 0.05640729865771826, "f1_stderr": 0.0013382113030996202, "acc": 0.38661167934139673, "acc_stderr": 0.00909660619315009 }, "harness|drop|3": { "em": 0.0012583892617449664, "em_stderr": 0.0003630560893119234, "f1": 0.05640729865771826, "f1_stderr": 0.0013382113030996202 }, "harness|gsm8k|5": { "acc": 0.04473085670962851, "acc_stderr": 0.005693886131407052 }, "harness|winogrande|5": { "acc": 0.728492501973165, "acc_stderr": 0.012499326254893129 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
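The snippet in the card pulls a single task config; the aggregated scores quoted under "Latest results" live in the "results" config instead. A minimal sketch — both the "results" config and its "latest" split are declared in this record's metadata, though the exact column schema of the parquet files is not documented here:

```python
from datasets import load_dataset

# The "results" config and its "latest" split are declared in this record's
# metadata; the parquet column schema is undocumented, so the inspection
# below stays generic.
results = load_dataset(
    "open-llm-leaderboard/details_beaugogh__Llama2-7b-openorca-mc-v2-dpo",
    "results",
    split="latest",
)
print(results.column_names)
```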
open-llm-leaderboard/details_beaugogh__Llama2-7b-openorca-mc-v2-dpo
[ "region:us" ]
2023-10-08T18:52:47+00:00
{"pretty_name": "Evaluation run of beaugogh/Llama2-7b-openorca-mc-v2-dpo", "dataset_summary": "Dataset automatically created during the evaluation run of model [beaugogh/Llama2-7b-openorca-mc-v2-dpo](https://huggingface.co/beaugogh/Llama2-7b-openorca-mc-v2-dpo) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_beaugogh__Llama2-7b-openorca-mc-v2-dpo\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-25T22:40:34.930470](https://huggingface.co/datasets/open-llm-leaderboard/details_beaugogh__Llama2-7b-openorca-mc-v2-dpo/blob/main/results_2023-10-25T22-40-34.930470.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0012583892617449664,\n \"em_stderr\": 0.0003630560893119234,\n \"f1\": 0.05640729865771826,\n \"f1_stderr\": 0.0013382113030996202,\n \"acc\": 0.38661167934139673,\n \"acc_stderr\": 0.00909660619315009\n },\n \"harness|drop|3\": {\n \"em\": 0.0012583892617449664,\n \"em_stderr\": 0.0003630560893119234,\n \"f1\": 0.05640729865771826,\n \"f1_stderr\": 0.0013382113030996202\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.04473085670962851,\n \"acc_stderr\": 0.005693886131407052\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.728492501973165,\n \"acc_stderr\": 0.012499326254893129\n }\n}\n```", "repo_url": "https://huggingface.co/beaugogh/Llama2-7b-openorca-mc-v2-dpo", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|arc:challenge|25_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_25T22_40_34.930470", "path": ["**/details_harness|drop|3_2023-10-25T22-40-34.930470.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-25T22-40-34.930470.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_25T22_40_34.930470", "path": ["**/details_harness|gsm8k|5_2023-10-25T22-40-34.930470.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-25T22-40-34.930470.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hellaswag|10_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hellaswag|10_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T19-52-28.810718.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T19-52-28.810718.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T19-52-28.810718.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T19-52-28.810718.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": 
"2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T19-52-28.810718.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T19-52-28.810718.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_25T22_40_34.930470", "path": ["**/details_harness|winogrande|5_2023-10-25T22-40-34.930470.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-25T22-40-34.930470.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_08T19_52_28.810718", "path": ["results_2023-10-08T19-52-28.810718.parquet"]}, {"split": "2023_10_25T22_40_34.930470", "path": ["results_2023-10-25T22-40-34.930470.parquet"]}, {"split": "latest", "path": ["results_2023-10-25T22-40-34.930470.parquet"]}]}]}
2023-10-25T21:40:48+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of beaugogh/Llama2-7b-openorca-mc-v2-dpo ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model beaugogh/Llama2-7b-openorca-mc-v2-dpo on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-25T22:40:34.930470 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each of them in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
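The summary above promises a loading example, but the code block was stripped when the card was flattened to plain text. A minimal sketch, assuming this record follows the leaderboard's usual `details_<org>__<model>` repository naming (the repo id itself is not shown in this extract):

```python
from datasets import load_dataset

# Hypothetical repo id, inferred from the leaderboard naming convention;
# config names such as "harness_winogrande_5" appear in this record's metadata.
data = load_dataset(
    "open-llm-leaderboard/details_beaugogh__Llama2-7b-openorca-mc-v2-dpo",
    "harness_winogrande_5",
    split="train",  # "train" always points to the latest results
)
```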
[ "# Dataset Card for Evaluation run of beaugogh/Llama2-7b-openorca-mc-v2-dpo", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model beaugogh/Llama2-7b-openorca-mc-v2-dpo on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-25T22:40:34.930470(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of beaugogh/Llama2-7b-openorca-mc-v2-dpo", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model beaugogh/Llama2-7b-openorca-mc-v2-dpo on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-25T22:40:34.930470(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 30, 31, 178, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of beaugogh/Llama2-7b-openorca-mc-v2-dpo## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model beaugogh/Llama2-7b-openorca-mc-v2-dpo on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-25T22:40:34.930470(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
6db5fed1d0fd793beb277888aabc711392deec57
# Dataset Card for "Data" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
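The stub card above gives no usage information; a minimal loading sketch based only on this record's metadata (a single "default" config with a "train" split and graph-style columns x, edge_index, edge_attr, y):

```python
from datasets import load_dataset

# Fields per the dataset_info metadata below: x and edge_index are nested
# float32 sequences, edge_attr is a float32 sequence, y is a float32 scalar.
ds = load_dataset("zardat/Data", split="train")
example = ds[0]
print(len(example["x"]), example["y"])  # node features and scalar target
```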
zardat/Data
[ "region:us" ]
2023-10-08T19:02:14+00:00
{"dataset_info": {"features": [{"name": "x", "sequence": {"sequence": "float32"}}, {"name": "edge_index", "sequence": {"sequence": "float32"}}, {"name": "edge_attr", "sequence": "float32"}, {"name": "y", "dtype": "float32"}], "splits": [{"name": "train", "num_bytes": 2704875888, "num_examples": 10329}], "download_size": 41637007, "dataset_size": 2704875888}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-08T19:05:56+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Data" More Information needed
[ "# Dataset Card for \"Data\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Data\"\n\nMore Information needed" ]
[ 6, 11 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"Data\"\n\nMore Information needed" ]
72a85ecd6e494c0183a457910cf4d8fcb4354c6b
# Dataset Card for Evaluation run of ibranze/araproje-llama2-7b-hf ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/ibranze/araproje-llama2-7b-hf - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [ibranze/araproje-llama2-7b-hf](https://huggingface.co/ibranze/araproje-llama2-7b-hf) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_ibranze__araproje-llama2-7b-hf", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-26T05:17:54.107073](https://huggingface.co/datasets/open-llm-leaderboard/details_ibranze__araproje-llama2-7b-hf/blob/main/results_2023-10-26T05-17-54.107073.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each of them in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0012583892617449664, "em_stderr": 0.00036305608931194434, "f1": 0.055925964765100665, "f1_stderr": 0.0013181664771628632, "acc": 0.4057988012013119, "acc_stderr": 0.00970458141675358 }, "harness|drop|3": { "em": 0.0012583892617449664, "em_stderr": 0.00036305608931194434, "f1": 0.055925964765100665, "f1_stderr": 0.0013181664771628632 }, "harness|gsm8k|5": { "acc": 0.0712661106899166, "acc_stderr": 0.007086462127954491 }, "harness|winogrande|5": { "acc": 0.7403314917127072, "acc_stderr": 0.012322700705552667 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_ibranze__araproje-llama2-7b-hf
[ "region:us" ]
2023-10-08T19:04:58+00:00
{"pretty_name": "Evaluation run of ibranze/araproje-llama2-7b-hf", "dataset_summary": "Dataset automatically created during the evaluation run of model [ibranze/araproje-llama2-7b-hf](https://huggingface.co/ibranze/araproje-llama2-7b-hf) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ibranze__araproje-llama2-7b-hf\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-26T05:17:54.107073](https://huggingface.co/datasets/open-llm-leaderboard/details_ibranze__araproje-llama2-7b-hf/blob/main/results_2023-10-26T05-17-54.107073.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0012583892617449664,\n \"em_stderr\": 0.00036305608931194434,\n \"f1\": 0.055925964765100665,\n \"f1_stderr\": 0.0013181664771628632,\n \"acc\": 0.4057988012013119,\n \"acc_stderr\": 0.00970458141675358\n },\n \"harness|drop|3\": {\n \"em\": 0.0012583892617449664,\n \"em_stderr\": 0.00036305608931194434,\n \"f1\": 0.055925964765100665,\n \"f1_stderr\": 0.0013181664771628632\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0712661106899166,\n \"acc_stderr\": 0.007086462127954491\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7403314917127072,\n \"acc_stderr\": 0.012322700705552667\n }\n}\n```", "repo_url": "https://huggingface.co/ibranze/araproje-llama2-7b-hf", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|arc:challenge|25_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_26T05_17_54.107073", "path": ["**/details_harness|drop|3_2023-10-26T05-17-54.107073.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-26T05-17-54.107073.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_26T05_17_54.107073", "path": ["**/details_harness|gsm8k|5_2023-10-26T05-17-54.107073.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-26T05-17-54.107073.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hellaswag|10_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T20-04-34.106747.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T20-04-34.106747.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T20-04-34.106747.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T20-04-34.106747.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T20-04-34.106747.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_26T05_17_54.107073", "path": ["**/details_harness|winogrande|5_2023-10-26T05-17-54.107073.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-26T05-17-54.107073.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_08T20_04_34.106747", "path": ["results_2023-10-08T20-04-34.106747.parquet"]}, {"split": "2023_10_26T05_17_54.107073", "path": ["results_2023-10-26T05-17-54.107073.parquet"]}, {"split": "latest", "path": ["results_2023-10-26T05-17-54.107073.parquet"]}]}]}
2023-10-26T04:18:07+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of ibranze/araproje-llama2-7b-hf ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model ibranze/araproje-llama2-7b-hf on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-26T05:17:54.107073 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each of them in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
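As above, the flattened text drops the loading snippet; the full markdown card for this record (earlier in this extract) gives it verbatim, restored here for readability:

```python
from datasets import load_dataset

# Copied from the full card for this record; "train" points to the latest run.
data = load_dataset(
    "open-llm-leaderboard/details_ibranze__araproje-llama2-7b-hf",
    "harness_winogrande_5",
    split="train",
)
```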
[ "# Dataset Card for Evaluation run of ibranze/araproje-llama2-7b-hf", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ibranze/araproje-llama2-7b-hf on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-26T05:17:54.107073(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of ibranze/araproje-llama2-7b-hf", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ibranze/araproje-llama2-7b-hf on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-26T05:17:54.107073(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 25, 31, 173, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of ibranze/araproje-llama2-7b-hf## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model ibranze/araproje-llama2-7b-hf on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-26T05:17:54.107073(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
d068a50cccaeb3e25d97206c124356a2fe48852c
# Dataset Card for Evaluation run of PygmalionAI/pygmalion-2-7b

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/PygmalionAI/pygmalion-2-7b
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [PygmalionAI/pygmalion-2-7b](https://huggingface.co/PygmalionAI/pygmalion-2-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_PygmalionAI__pygmalion-2-7b",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-28T09:33:13.706982](https://huggingface.co/datasets/open-llm-leaderboard/details_PygmalionAI__pygmalion-2-7b/blob/main/results_2023-10-28T09-33-13.706982.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.001153523489932886,
        "em_stderr": 0.00034761798968571027,
        "f1": 0.05976614932885909,
        "f1_stderr": 0.0013611207374076375,
        "acc": 0.4075329125111523,
        "acc_stderr": 0.009436763896104398
    },
    "harness|drop|3": {
        "em": 0.001153523489932886,
        "em_stderr": 0.00034761798968571027,
        "f1": 0.05976614932885909,
        "f1_stderr": 0.0013611207374076375
    },
    "harness|gsm8k|5": {
        "acc": 0.06368460955269144,
        "acc_stderr": 0.006726213078805692
    },
    "harness|winogrande|5": {
        "acc": 0.7513812154696132,
        "acc_stderr": 0.012147314713403105
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
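Beyond the single-task example in the card, the aggregated metrics can be pulled directly from the "results" configuration. This is a minimal sketch using the config and split names listed in this card's own configuration table; the row schema is whatever the harness wrote, so inspect it rather than relying on fixed field names:

```python
from datasets import load_dataset

# "results" aggregates all task metrics for a run; the "latest"
# split tracks the most recent evaluation (2023-10-28 here).
results = load_dataset(
    "open-llm-leaderboard/details_PygmalionAI__pygmalion-2-7b",
    "results",
    split="latest",
)

# One row per run snapshot; print it to see the stored metrics.
print(results[0])
```

Older runs remain available under their timestamped splits (e.g. `2023_10_08T20_22_41.887829` in this card's config listing).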
open-llm-leaderboard/details_PygmalionAI__pygmalion-2-7b
[ "region:us" ]
2023-10-08T19:23:05+00:00
{"pretty_name": "Evaluation run of PygmalionAI/pygmalion-2-7b", "dataset_summary": "Dataset automatically created during the evaluation run of model [PygmalionAI/pygmalion-2-7b](https://huggingface.co/PygmalionAI/pygmalion-2-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_PygmalionAI__pygmalion-2-7b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-28T09:33:13.706982](https://huggingface.co/datasets/open-llm-leaderboard/details_PygmalionAI__pygmalion-2-7b/blob/main/results_2023-10-28T09-33-13.706982.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.001153523489932886,\n \"em_stderr\": 0.00034761798968571027,\n \"f1\": 0.05976614932885909,\n \"f1_stderr\": 0.0013611207374076375,\n \"acc\": 0.4075329125111523,\n \"acc_stderr\": 0.009436763896104398\n },\n \"harness|drop|3\": {\n \"em\": 0.001153523489932886,\n \"em_stderr\": 0.00034761798968571027,\n \"f1\": 0.05976614932885909,\n \"f1_stderr\": 0.0013611207374076375\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.06368460955269144,\n \"acc_stderr\": 0.006726213078805692\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7513812154696132,\n \"acc_stderr\": 0.012147314713403105\n }\n}\n```", "repo_url": "https://huggingface.co/PygmalionAI/pygmalion-2-7b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|arc:challenge|25_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_28T09_33_13.706982", "path": ["**/details_harness|drop|3_2023-10-28T09-33-13.706982.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-28T09-33-13.706982.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_28T09_33_13.706982", "path": ["**/details_harness|gsm8k|5_2023-10-28T09-33-13.706982.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-28T09-33-13.706982.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hellaswag|10_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", 
"data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T20-22-41.887829.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T20-22-41.887829.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T20-22-41.887829.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T20-22-41.887829.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T20-22-41.887829.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_28T09_33_13.706982", "path": ["**/details_harness|winogrande|5_2023-10-28T09-33-13.706982.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-28T09-33-13.706982.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_08T20_22_41.887829", "path": ["results_2023-10-08T20-22-41.887829.parquet"]}, {"split": "2023_10_28T09_33_13.706982", "path": ["results_2023-10-28T09-33-13.706982.parquet"]}, {"split": "latest", "path": ["results_2023-10-28T09-33-13.706982.parquet"]}]}]}
2023-10-28T08:33:25+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of PygmalionAI/pygmalion-2-7b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model PygmalionAI/pygmalion-2-7b on the Open LLM Leaderboard. The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-28T09:33:13.706982(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of PygmalionAI/pygmalion-2-7b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model PygmalionAI/pygmalion-2-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-28T09:33:13.706982(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of PygmalionAI/pygmalion-2-7b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model PygmalionAI/pygmalion-2-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-28T09:33:13.706982(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 22, 31, 170, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of PygmalionAI/pygmalion-2-7b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model PygmalionAI/pygmalion-2-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-28T09:33:13.706982(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
61eb5f6922bd2061f211dea2a4b81790954031ac
# Dataset Card for Evaluation run of zarakiquemparte/zarablend-1.1-l2-7b

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/zarakiquemparte/zarablend-1.1-l2-7b
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [zarakiquemparte/zarablend-1.1-l2-7b](https://huggingface.co/zarakiquemparte/zarablend-1.1-l2-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_zarakiquemparte__zarablend-1.1-l2-7b",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-24T16:53:24.152575](https://huggingface.co/datasets/open-llm-leaderboard/details_zarakiquemparte__zarablend-1.1-l2-7b/blob/main/results_2023-10-24T16-53-24.152575.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.2829278523489933,
        "em_stderr": 0.0046127395092502785,
        "f1": 0.3596476510067135,
        "f1_stderr": 0.004549657562733716,
        "acc": 0.38580685542430376,
        "acc_stderr": 0.009136475194671255
    },
    "harness|drop|3": {
        "em": 0.2829278523489933,
        "em_stderr": 0.0046127395092502785,
        "f1": 0.3596476510067135,
        "f1_stderr": 0.004549657562733716
    },
    "harness|gsm8k|5": {
        "acc": 0.045489006823351025,
        "acc_stderr": 0.005739657656722217
    },
    "harness|winogrande|5": {
        "acc": 0.7261247040252565,
        "acc_stderr": 0.012533292732620292
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
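To enumerate the 64 task configurations this card mentions without reading the raw metadata, the `datasets` helper `get_dataset_config_names` can list them. A minimal sketch follows; the config and split names for the DROP load are taken from this card's configuration listing:

```python
from datasets import get_dataset_config_names, load_dataset

REPO = "open-llm-leaderboard/details_zarakiquemparte__zarablend-1.1-l2-7b"

# Enumerate the per-task configurations (plus the aggregated "results").
for name in get_dataset_config_names(REPO):
    print(name)

# Load the latest DROP details for this model.
drop = load_dataset(REPO, "harness_drop_3", split="latest")
print(drop)
```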
open-llm-leaderboard/details_zarakiquemparte__zarablend-1.1-l2-7b
[ "region:us" ]
2023-10-08T19:35:02+00:00
{"pretty_name": "Evaluation run of zarakiquemparte/zarablend-1.1-l2-7b", "dataset_summary": "Dataset automatically created during the evaluation run of model [zarakiquemparte/zarablend-1.1-l2-7b](https://huggingface.co/zarakiquemparte/zarablend-1.1-l2-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_zarakiquemparte__zarablend-1.1-l2-7b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-24T16:53:24.152575](https://huggingface.co/datasets/open-llm-leaderboard/details_zarakiquemparte__zarablend-1.1-l2-7b/blob/main/results_2023-10-24T16-53-24.152575.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.2829278523489933,\n \"em_stderr\": 0.0046127395092502785,\n \"f1\": 0.3596476510067135,\n \"f1_stderr\": 0.004549657562733716,\n \"acc\": 0.38580685542430376,\n \"acc_stderr\": 0.009136475194671255\n },\n \"harness|drop|3\": {\n \"em\": 0.2829278523489933,\n \"em_stderr\": 0.0046127395092502785,\n \"f1\": 0.3596476510067135,\n \"f1_stderr\": 0.004549657562733716\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.045489006823351025,\n \"acc_stderr\": 0.005739657656722217\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7261247040252565,\n \"acc_stderr\": 0.012533292732620292\n }\n}\n```", "repo_url": "https://huggingface.co/zarakiquemparte/zarablend-1.1-l2-7b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|arc:challenge|25_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_24T16_53_24.152575", "path": ["**/details_harness|drop|3_2023-10-24T16-53-24.152575.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-24T16-53-24.152575.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_24T16_53_24.152575", "path": ["**/details_harness|gsm8k|5_2023-10-24T16-53-24.152575.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-24T16-53-24.152575.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hellaswag|10_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-08T20-34-38.320909.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T20-34-38.320909.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T20-34-38.320909.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T20-34-38.320909.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T20-34-38.320909.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T20-34-38.320909.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_24T16_53_24.152575", "path": ["**/details_harness|winogrande|5_2023-10-24T16-53-24.152575.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-24T16-53-24.152575.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_08T20_34_38.320909", "path": ["results_2023-10-08T20-34-38.320909.parquet"]}, {"split": "2023_10_24T16_53_24.152575", "path": ["results_2023-10-24T16-53-24.152575.parquet"]}, {"split": "latest", "path": ["results_2023-10-24T16-53-24.152575.parquet"]}]}]}
2023-10-24T15:53:36+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of zarakiquemparte/zarablend-1.1-l2-7b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model zarakiquemparte/zarablend-1.1-l2-7b on the Open LLM Leaderboard. The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-24T16:53:24.152575(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of zarakiquemparte/zarablend-1.1-l2-7b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model zarakiquemparte/zarablend-1.1-l2-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-24T16:53:24.152575(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of zarakiquemparte/zarablend-1.1-l2-7b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model zarakiquemparte/zarablend-1.1-l2-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-24T16:53:24.152575(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 25, 31, 173, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of zarakiquemparte/zarablend-1.1-l2-7b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model zarakiquemparte/zarablend-1.1-l2-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-24T16:53:24.152575(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
fd474e77166850d2ff563c5d5cd25f7c87df250f
# Dataset Card for Evaluation run of uukuguy/speechless-codellama-34b-v1.9

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/uukuguy/speechless-codellama-34b-v1.9
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [uukuguy/speechless-codellama-34b-v1.9](https://huggingface.co/uukuguy/speechless-codellama-34b-v1.9) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_uukuguy__speechless-codellama-34b-v1.9",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-28T13:29:15.296218](https://huggingface.co/datasets/open-llm-leaderboard/details_uukuguy__speechless-codellama-34b-v1.9/blob/main/results_2023-10-28T13-29-15.296218.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.29771392617449666,
        "em_stderr": 0.004682699129958643,
        "f1": 0.3473626258389263,
        "f1_stderr": 0.004601090689469596,
        "acc": 0.4917554915020767,
        "acc_stderr": 0.012144352555904984
    },
    "harness|drop|3": {
        "em": 0.29771392617449666,
        "em_stderr": 0.004682699129958643,
        "f1": 0.3473626258389263,
        "f1_stderr": 0.004601090689469596
    },
    "harness|gsm8k|5": {
        "acc": 0.24791508718726307,
        "acc_stderr": 0.01189398021482617
    },
    "harness|winogrande|5": {
        "acc": 0.7355958958168903,
        "acc_stderr": 0.012394724896983799
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
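The aggregated numbers above live in the dedicated `results` configuration; a minimal sketch for pulling them, assuming the same config layout as the other details datasets (a `results` config whose `latest` split points to the most recent run):

```python
from datasets import load_dataset

# The "results" configuration stores the aggregated metrics of each run;
# its "latest" split points to the most recent results file (2023-10-28 here).
results = load_dataset(
    "open-llm-leaderboard/details_uukuguy__speechless-codellama-34b-v1.9",
    "results",
    split="latest",
)

# Sanity check: the "all" accuracy above is the plain mean of the gsm8k
# and winogrande accuracies.
print((0.24791508718726307 + 0.7355958958168903) / 2)  # 0.4917554915020767
```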
open-llm-leaderboard/details_uukuguy__speechless-codellama-34b-v1.9
[ "region:us" ]
2023-10-08T19:45:22+00:00
{"pretty_name": "Evaluation run of uukuguy/speechless-codellama-34b-v1.9", "dataset_summary": "Dataset automatically created during the evaluation run of model [uukuguy/speechless-codellama-34b-v1.9](https://huggingface.co/uukuguy/speechless-codellama-34b-v1.9) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_uukuguy__speechless-codellama-34b-v1.9\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-28T13:29:15.296218](https://huggingface.co/datasets/open-llm-leaderboard/details_uukuguy__speechless-codellama-34b-v1.9/blob/main/results_2023-10-28T13-29-15.296218.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.29771392617449666,\n \"em_stderr\": 0.004682699129958643,\n \"f1\": 0.3473626258389263,\n \"f1_stderr\": 0.004601090689469596,\n \"acc\": 0.4917554915020767,\n \"acc_stderr\": 0.012144352555904984\n },\n \"harness|drop|3\": {\n \"em\": 0.29771392617449666,\n \"em_stderr\": 0.004682699129958643,\n \"f1\": 0.3473626258389263,\n \"f1_stderr\": 0.004601090689469596\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.24791508718726307,\n \"acc_stderr\": 0.01189398021482617\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7355958958168903,\n \"acc_stderr\": 0.012394724896983799\n }\n}\n```", "repo_url": "https://huggingface.co/uukuguy/speechless-codellama-34b-v1.9", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|arc:challenge|25_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_28T13_29_15.296218", "path": ["**/details_harness|drop|3_2023-10-28T13-29-15.296218.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-28T13-29-15.296218.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_28T13_29_15.296218", "path": ["**/details_harness|gsm8k|5_2023-10-28T13-29-15.296218.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-28T13-29-15.296218.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hellaswag|10_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-08T20-44-59.061253.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T20-44-59.061253.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T20-44-59.061253.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T20-44-59.061253.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T20-44-59.061253.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T20-44-59.061253.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_28T13_29_15.296218", "path": ["**/details_harness|winogrande|5_2023-10-28T13-29-15.296218.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-28T13-29-15.296218.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_08T20_44_59.061253", "path": ["results_2023-10-08T20-44-59.061253.parquet"]}, {"split": "2023_10_28T13_29_15.296218", "path": ["results_2023-10-28T13-29-15.296218.parquet"]}, {"split": "latest", "path": ["results_2023-10-28T13-29-15.296218.parquet"]}]}]}
2023-10-28T12:29:27+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of uukuguy/speechless-codellama-34b-v1.9 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model uukuguy/speechless-codellama-34b-v1.9 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-28T13:29:15.296218 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
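The loading snippet referenced above is missing from this processed card text; a minimal sketch follows. The repo id is an assumption inferred from the leaderboard's usual details_<org>__<model> naming pattern (it is not spelled out on this card), while the configuration name and the "latest" split are taken from the configs listed in this entry's metadata:

```python
from datasets import load_dataset

# Repo id assumed from the Open LLM Leaderboard naming convention, not stated on this card
data = load_dataset(
    "open-llm-leaderboard/details_uukuguy__speechless-codellama-34b-v1.9",
    "harness_winogrande_5",  # any of the 64 task configurations can be used here
    split="latest",          # or a timestamped split such as "2023_10_28T13_29_15.296218"
)
```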
[ "# Dataset Card for Evaluation run of uukuguy/speechless-codellama-34b-v1.9", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model uukuguy/speechless-codellama-34b-v1.9 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-28T13:29:15.296218(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of uukuguy/speechless-codellama-34b-v1.9", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model uukuguy/speechless-codellama-34b-v1.9 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-28T13:29:15.296218(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 26, 31, 174, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of uukuguy/speechless-codellama-34b-v1.9## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model uukuguy/speechless-codellama-34b-v1.9 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-28T13:29:15.296218(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
fc4babc1f44e0aad9e8c590c01dce5ae87ca3cd4
# Dataset Card for Evaluation run of openbmb/UltraRM-13b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/openbmb/UltraRM-13b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [openbmb/UltraRM-13b](https://huggingface.co/openbmb/UltraRM-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_openbmb__UltraRM-13b", "harness_gsm8k_5", split="train") ``` ## Latest results These are the [latest results from run 2023-12-02T13:26:56.823138](https://huggingface.co/datasets/open-llm-leaderboard/details_openbmb__UltraRM-13b/blob/main/results_2023-12-02T13-26-56.823138.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.0, "acc_stderr": 0.0 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_openbmb__UltraRM-13b
[ "region:us" ]
2023-10-08T19:46:12+00:00
{"pretty_name": "Evaluation run of openbmb/UltraRM-13b", "dataset_summary": "Dataset automatically created during the evaluation run of model [openbmb/UltraRM-13b](https://huggingface.co/openbmb/UltraRM-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_openbmb__UltraRM-13b\",\n\t\"harness_gsm8k_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-02T13:26:56.823138](https://huggingface.co/datasets/open-llm-leaderboard/details_openbmb__UltraRM-13b/blob/main/results_2023-12-02T13-26-56.823138.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": "https://huggingface.co/openbmb/UltraRM-13b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|arc:challenge|25_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_24T08_13_56.124311", "path": ["**/details_harness|drop|3_2023-10-24T08-13-56.124311.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-24T08-13-56.124311.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_24T08_13_56.124311", "path": ["**/details_harness|gsm8k|5_2023-10-24T08-13-56.124311.parquet"]}, {"split": "2023_12_02T13_26_56.823138", "path": ["**/details_harness|gsm8k|5_2023-12-02T13-26-56.823138.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-02T13-26-56.823138.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hellaswag|10_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T20-45-47.827028.parquet", 
"**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T20-45-47.827028.parquet", 
"**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T20-45-47.827028.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T20-45-47.827028.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": 
["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": 
["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T20-45-47.827028.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": 
["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T20-45-47.827028.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T20-45-47.827028.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_24T08_13_56.124311", "path": ["**/details_harness|winogrande|5_2023-10-24T08-13-56.124311.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-24T08-13-56.124311.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_08T20_45_47.827028", "path": ["results_2023-10-08T20-45-47.827028.parquet"]}, {"split": "2023_10_24T08_13_56.124311", "path": ["results_2023-10-24T08-13-56.124311.parquet"]}, {"split": "2023_12_02T13_26_56.823138", "path": ["results_2023-12-02T13-26-56.823138.parquet"]}, {"split": "latest", "path": ["results_2023-12-02T13-26-56.823138.parquet"]}]}]}
2023-12-02T13:27:15+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of openbmb/UltraRM-13b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model openbmb/UltraRM-13b on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-02T13:26:56.823138 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
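The card text above ends its loading instructions at the colon because the code snippet was stripped in this text-only rendering. A minimal sketch follows: the repo id `open-llm-leaderboard/details_openbmb__UltraRM-13b` is an assumption inferred from the `details_<org>__<model>` naming of the neighbouring entries, while the `harness_winogrande_5` config and the `latest` split do appear in this record's metadata.

```python
from datasets import load_dataset

# Repo id assumed from the details_<org>__<model> convention used by sibling datasets;
# "harness_winogrande_5" and the "latest" split are listed in this record's metadata.
data = load_dataset(
    "open-llm-leaderboard/details_openbmb__UltraRM-13b",
    "harness_winogrande_5",
    split="latest",
)
print(data)
```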
[ "# Dataset Card for Evaluation run of openbmb/UltraRM-13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model openbmb/UltraRM-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-02T13:26:56.823138(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of openbmb/UltraRM-13b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model openbmb/UltraRM-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-02T13:26:56.823138(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 18, 31, 167, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of openbmb/UltraRM-13b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model openbmb/UltraRM-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-02T13:26:56.823138(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
88b6bfce3f388c706418d798b6a932ce8839a335
# Dataset Card for Evaluation run of itsliupeng/llama2_7b_code ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/itsliupeng/llama2_7b_code - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [itsliupeng/llama2_7b_code](https://huggingface.co/itsliupeng/llama2_7b_code) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_itsliupeng__llama2_7b_code", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-26T11:17:28.829100](https://huggingface.co/datasets/open-llm-leaderboard/details_itsliupeng__llama2_7b_code/blob/main/results_2023-10-26T11-17-28.829100.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0009437919463087249, "em_stderr": 0.00031446531194130476, "f1": 0.05393036912751694, "f1_stderr": 0.0012935627430820335, "acc": 0.3980985212183299, "acc_stderr": 0.01010319096153194 }, "harness|drop|3": { "em": 0.0009437919463087249, "em_stderr": 0.00031446531194130476, "f1": 0.05393036912751694, "f1_stderr": 0.0012935627430820335 }, "harness|gsm8k|5": { "acc": 0.08112206216830932, "acc_stderr": 0.007520395797922653 }, "harness|winogrande|5": { "acc": 0.7150749802683505, "acc_stderr": 0.012685986125141227 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
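A supplementary usage sketch (an addition, not part of the original card): besides the per-task configs, the aggregated scores shown under "Latest results" are stored in the "results" config, whose "latest" split is listed in this record's metadata, so they can be read back directly.

```python
from datasets import load_dataset

# The "results" config holds the aggregated metrics; the "latest" split
# always points at the most recent run for this repo.
results = load_dataset(
    "open-llm-leaderboard/details_itsliupeng__llama2_7b_code",
    "results",
    split="latest",
)
print(results[0])  # first (and typically only) row of aggregated metrics
```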
open-llm-leaderboard/details_itsliupeng__llama2_7b_code
[ "region:us" ]
2023-10-08T19:46:51+00:00
{"pretty_name": "Evaluation run of itsliupeng/llama2_7b_code", "dataset_summary": "Dataset automatically created during the evaluation run of model [itsliupeng/llama2_7b_code](https://huggingface.co/itsliupeng/llama2_7b_code) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_itsliupeng__llama2_7b_code\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-26T11:17:28.829100](https://huggingface.co/datasets/open-llm-leaderboard/details_itsliupeng__llama2_7b_code/blob/main/results_2023-10-26T11-17-28.829100.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0009437919463087249,\n \"em_stderr\": 0.00031446531194130476,\n \"f1\": 0.05393036912751694,\n \"f1_stderr\": 0.0012935627430820335,\n \"acc\": 0.3980985212183299,\n \"acc_stderr\": 0.01010319096153194\n },\n \"harness|drop|3\": {\n \"em\": 0.0009437919463087249,\n \"em_stderr\": 0.00031446531194130476,\n \"f1\": 0.05393036912751694,\n \"f1_stderr\": 0.0012935627430820335\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.08112206216830932,\n \"acc_stderr\": 0.007520395797922653\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7150749802683505,\n \"acc_stderr\": 0.012685986125141227\n }\n}\n```", "repo_url": "https://huggingface.co/itsliupeng/llama2_7b_code", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|arc:challenge|25_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_26T11_17_28.829100", "path": ["**/details_harness|drop|3_2023-10-26T11-17-28.829100.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-26T11-17-28.829100.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_26T11_17_28.829100", "path": ["**/details_harness|gsm8k|5_2023-10-26T11-17-28.829100.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-26T11-17-28.829100.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hellaswag|10_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": 
[{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T20-46-27.226805.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T20-46-27.226805.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T20-46-27.226805.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T20-46-27.226805.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T20-46-27.226805.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_26T11_17_28.829100", "path": ["**/details_harness|winogrande|5_2023-10-26T11-17-28.829100.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-26T11-17-28.829100.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_08T20_46_27.226805", "path": ["results_2023-10-08T20-46-27.226805.parquet"]}, {"split": "2023_10_26T11_17_28.829100", "path": ["results_2023-10-26T11-17-28.829100.parquet"]}, {"split": "latest", "path": ["results_2023-10-26T11-17-28.829100.parquet"]}]}]}
2023-10-26T10:17:41+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of itsliupeng/llama2_7b_code ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model itsliupeng/llama2_7b_code on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-26T11:17:28.829100 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of itsliupeng/llama2_7b_code", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model itsliupeng/llama2_7b_code on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-26T11:17:28.829100(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of itsliupeng/llama2_7b_code", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model itsliupeng/llama2_7b_code on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-26T11:17:28.829100(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 21, 31, 169, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of itsliupeng/llama2_7b_code## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model itsliupeng/llama2_7b_code on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-26T11:17:28.829100(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
55fb152048be78703321537fe98b91de59c07657
# Dataset Card for Evaluation run of Undi95/MLewd-ReMM-L2-Chat-20B-Inverted ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/Undi95/MLewd-ReMM-L2-Chat-20B-Inverted - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [Undi95/MLewd-ReMM-L2-Chat-20B-Inverted](https://huggingface.co/Undi95/MLewd-ReMM-L2-Chat-20B-Inverted) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Undi95__MLewd-ReMM-L2-Chat-20B-Inverted", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-29T11:23:30.940403](https://huggingface.co/datasets/open-llm-leaderboard/details_Undi95__MLewd-ReMM-L2-Chat-20B-Inverted/blob/main/results_2023-10-29T11-23-30.940403.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.04079278523489933, "em_stderr": 0.0020257579367794474, "f1": 0.12161703020134187, "f1_stderr": 0.002493984929248759, "acc": 0.4235474125060661, "acc_stderr": 0.009995123061460923 }, "harness|drop|3": { "em": 0.04079278523489933, "em_stderr": 0.0020257579367794474, "f1": 0.12161703020134187, "f1_stderr": 0.002493984929248759 }, "harness|gsm8k|5": { "acc": 0.09097801364670205, "acc_stderr": 0.007921322844013656 }, "harness|winogrande|5": { "acc": 0.7561168113654302, "acc_stderr": 0.01206892327890819 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
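One more small sketch along the same lines (again an addition, not from the card): any per-task config named in this record's metadata can be loaded the same way, for example the `harness_gsm8k_5` details behind the GSM8K accuracy reported above.

```python
from datasets import load_dataset

# "harness_gsm8k_5" and its "latest" split are listed in this record's metadata.
gsm8k_details = load_dataset(
    "open-llm-leaderboard/details_Undi95__MLewd-ReMM-L2-Chat-20B-Inverted",
    "harness_gsm8k_5",
    split="latest",
)
print(len(gsm8k_details))  # number of per-example detail rows
```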
open-llm-leaderboard/details_Undi95__MLewd-ReMM-L2-Chat-20B-Inverted
[ "region:us" ]
2023-10-08T20:13:29+00:00
{"pretty_name": "Evaluation run of Undi95/MLewd-ReMM-L2-Chat-20B-Inverted", "dataset_summary": "Dataset automatically created during the evaluation run of model [Undi95/MLewd-ReMM-L2-Chat-20B-Inverted](https://huggingface.co/Undi95/MLewd-ReMM-L2-Chat-20B-Inverted) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Undi95__MLewd-ReMM-L2-Chat-20B-Inverted\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-29T11:23:30.940403](https://huggingface.co/datasets/open-llm-leaderboard/details_Undi95__MLewd-ReMM-L2-Chat-20B-Inverted/blob/main/results_2023-10-29T11-23-30.940403.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.04079278523489933,\n \"em_stderr\": 0.0020257579367794474,\n \"f1\": 0.12161703020134187,\n \"f1_stderr\": 0.002493984929248759,\n \"acc\": 0.4235474125060661,\n \"acc_stderr\": 0.009995123061460923\n },\n \"harness|drop|3\": {\n \"em\": 0.04079278523489933,\n \"em_stderr\": 0.0020257579367794474,\n \"f1\": 0.12161703020134187,\n \"f1_stderr\": 0.002493984929248759\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.09097801364670205,\n \"acc_stderr\": 0.007921322844013656\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7561168113654302,\n \"acc_stderr\": 0.01206892327890819\n }\n}\n```", "repo_url": "https://huggingface.co/Undi95/MLewd-ReMM-L2-Chat-20B-Inverted", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|arc:challenge|25_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_29T11_23_30.940403", "path": ["**/details_harness|drop|3_2023-10-29T11-23-30.940403.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-29T11-23-30.940403.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_29T11_23_30.940403", "path": ["**/details_harness|gsm8k|5_2023-10-29T11-23-30.940403.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-29T11-23-30.940403.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hellaswag|10_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hellaswag|10_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T21-13-04.392733.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T21-13-04.392733.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T21-13-04.392733.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T21-13-04.392733.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": 
"2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T21-13-04.392733.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T21-13-04.392733.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_29T11_23_30.940403", "path": ["**/details_harness|winogrande|5_2023-10-29T11-23-30.940403.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-29T11-23-30.940403.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_08T21_13_04.392733", "path": ["results_2023-10-08T21-13-04.392733.parquet"]}, {"split": "2023_10_29T11_23_30.940403", "path": ["results_2023-10-29T11-23-30.940403.parquet"]}, {"split": "latest", "path": ["results_2023-10-29T11-23-30.940403.parquet"]}]}]}
2023-10-29T11:23:43+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Undi95/MLewd-ReMM-L2-Chat-20B-Inverted ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model Undi95/MLewd-ReMM-L2-Chat-20B-Inverted on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the snippet after this card): ## Latest results These are the latest results from run 2023-10-29T11:23:30.940403 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
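The fenced example that belongs at "do the following" was dropped when this card text was flattened; the record's metadata above carries it verbatim, so the call for one task's details reads as follows (comments added):

```python
from datasets import load_dataset

# One configuration per evaluated task; per the card, the "train" split
# always points at the latest results for that task.
data = load_dataset(
    "open-llm-leaderboard/details_Undi95__MLewd-ReMM-L2-Chat-20B-Inverted",
    "harness_winogrande_5",
    split="train",
)
```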
[ "# Dataset Card for Evaluation run of Undi95/MLewd-ReMM-L2-Chat-20B-Inverted", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Undi95/MLewd-ReMM-L2-Chat-20B-Inverted on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-29T11:23:30.940403(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Undi95/MLewd-ReMM-L2-Chat-20B-Inverted", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Undi95/MLewd-ReMM-L2-Chat-20B-Inverted on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-29T11:23:30.940403(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 29, 31, 177, 68, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Undi95/MLewd-ReMM-L2-Chat-20B-Inverted## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model Undi95/MLewd-ReMM-L2-Chat-20B-Inverted on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-29T11:23:30.940403(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
aa097ab8024d6902a7fd9868df9a3999e2013b56
# Dataset Card for Evaluation run of pankajmathur/Lima_Unchained_70b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/pankajmathur/Lima_Unchained_70b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [pankajmathur/Lima_Unchained_70b](https://huggingface.co/pankajmathur/Lima_Unchained_70b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_pankajmathur__Lima_Unchained_70b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-24T15:12:00.885313](https://huggingface.co/datasets/open-llm-leaderboard/details_pankajmathur__Lima_Unchained_70b/blob/main/results_2023-10-24T15-12-00.885313.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.08095637583892618, "em_stderr": 0.0027934007378494835, "f1": 0.14366401006711405, "f1_stderr": 0.0029514013565745323, "acc": 0.591927346839615, "acc_stderr": 0.011752297176210316 }, "harness|drop|3": { "em": 0.08095637583892618, "em_stderr": 0.0027934007378494835, "f1": 0.14366401006711405, "f1_stderr": 0.0029514013565745323 }, "harness|gsm8k|5": { "acc": 0.34723275208491283, "acc_stderr": 0.01311389838214687 }, "harness|winogrande|5": { "acc": 0.8366219415943172, "acc_stderr": 0.01039069597027376 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
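The snippet in the card loads a single task's details. A minimal follow-up sketch, assuming the standard `datasets` API and the config/split names declared in this record's metadata (the "results" config and its "latest" split come from the record; the indexing at the end is illustrative):

```python
from datasets import load_dataset

# The "results" configuration aggregates every run; "latest" points at the
# most recent one (2023-10-24T15:12:00.885313 for this model).
results = load_dataset(
    "open-llm-leaderboard/details_pankajmathur__Lima_Unchained_70b",
    "results",
    split="latest",
)
print(results[0])  # aggregated metrics, e.g. acc for harness|winogrande|5
```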
open-llm-leaderboard/details_pankajmathur__Lima_Unchained_70b
[ "region:us" ]
2023-10-08T20:18:38+00:00
{"pretty_name": "Evaluation run of pankajmathur/Lima_Unchained_70b", "dataset_summary": "Dataset automatically created during the evaluation run of model [pankajmathur/Lima_Unchained_70b](https://huggingface.co/pankajmathur/Lima_Unchained_70b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_pankajmathur__Lima_Unchained_70b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-24T15:12:00.885313](https://huggingface.co/datasets/open-llm-leaderboard/details_pankajmathur__Lima_Unchained_70b/blob/main/results_2023-10-24T15-12-00.885313.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.08095637583892618,\n \"em_stderr\": 0.0027934007378494835,\n \"f1\": 0.14366401006711405,\n \"f1_stderr\": 0.0029514013565745323,\n \"acc\": 0.591927346839615,\n \"acc_stderr\": 0.011752297176210316\n },\n \"harness|drop|3\": {\n \"em\": 0.08095637583892618,\n \"em_stderr\": 0.0027934007378494835,\n \"f1\": 0.14366401006711405,\n \"f1_stderr\": 0.0029514013565745323\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.34723275208491283,\n \"acc_stderr\": 0.01311389838214687\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8366219415943172,\n \"acc_stderr\": 0.01039069597027376\n }\n}\n```", "repo_url": "https://huggingface.co/pankajmathur/Lima_Unchained_70b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|arc:challenge|25_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_24T15_12_00.885313", "path": ["**/details_harness|drop|3_2023-10-24T15-12-00.885313.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-24T15-12-00.885313.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_24T15_12_00.885313", "path": ["**/details_harness|gsm8k|5_2023-10-24T15-12-00.885313.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-24T15-12-00.885313.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hellaswag|10_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T21-18-19.268295.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T21-18-19.268295.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T21-18-19.268295.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T21-18-19.268295.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T21-18-19.268295.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_24T15_12_00.885313", "path": ["**/details_harness|winogrande|5_2023-10-24T15-12-00.885313.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-24T15-12-00.885313.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_08T21_18_19.268295", "path": ["results_2023-10-08T21-18-19.268295.parquet"]}, {"split": "2023_10_24T15_12_00.885313", "path": ["results_2023-10-24T15-12-00.885313.parquet"]}, {"split": "latest", "path": ["results_2023-10-24T15-12-00.885313.parquet"]}]}]}
2023-10-24T14:12:13+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of pankajmathur/Lima_Unchained_70b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model pankajmathur/Lima_Unchained_70b on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-24T15:12:00.885313 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
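The loading snippet referenced above was stripped out of this processed copy of the card. A minimal sketch of what it would look like, assuming the details repo follows the leaderboard's usual `details_<org>__<model>` naming; the configuration and split names used here do appear in this record's metadata:

```python
from datasets import load_dataset

# Assumed repo id, reconstructed from the details_<org>__<model> convention;
# "harness_winogrande_5" and the "latest" split are listed in the metadata above.
data = load_dataset(
    "open-llm-leaderboard/details_pankajmathur__Lima_Unchained_70b",
    "harness_winogrande_5",  # one of the 64 per-task configurations
    split="latest",          # or a timestamped split such as "2023_10_24T15_12_00.885313"
)
```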
[ "# Dataset Card for Evaluation run of pankajmathur/Lima_Unchained_70b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model pankajmathur/Lima_Unchained_70b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-24T15:12:00.885313(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of pankajmathur/Lima_Unchained_70b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model pankajmathur/Lima_Unchained_70b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-24T15:12:00.885313(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 23, 31, 171, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of pankajmathur/Lima_Unchained_70b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model pankajmathur/Lima_Unchained_70b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-24T15:12:00.885313(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
c87f06b25056e1a8f911d61d2113867a6b70e910
# Dataset Card for "legal-es-masked" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
gayanin/legal-es-masked
[ "region:us" ]
2023-10-08T20:39:11+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}, {"split": "validation", "path": "data/validation-*"}]}], "dataset_info": {"features": [{"name": "original_sent", "dtype": "string"}, {"name": "masked_sent", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 14319276284, "num_examples": 48833571}, {"name": "test", "num_bytes": 2144523252, "num_examples": 6104196}, {"name": "validation", "num_bytes": 2169841655, "num_examples": 6104197}], "download_size": 8287754892, "dataset_size": 18633641191}}
2023-10-08T20:56:11+00:00
[]
[]
TAGS #region-us
# Dataset Card for "legal-es-masked" More Information needed
[ "# Dataset Card for \"legal-es-masked\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"legal-es-masked\"\n\nMore Information needed" ]
[ 6, 16 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"legal-es-masked\"\n\nMore Information needed" ]
9c3223dca7b808206d308a0381499f9ce7f8927e
# Dataset Card for "arxiv-metadata-oai-snapshot" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) This is a mirror of the metadata portion of the arXiv [dataset](https://www.kaggle.com/datasets/Cornell-University/arxiv/versions/147). The sync will take place weekly so may fall behind the original datasets slightly if there are more regular updates to the source dataset. ## Metadata This dataset is a mirror of the original ArXiv data. This dataset contains an entry for each paper, containing: - id: ArXiv ID (can be used to access the paper, see below) - submitter: Who submitted the paper - authors: Authors of the paper - title: Title of the paper - comments: Additional info, such as number of pages and figures - journal-ref: Information about the journal the paper was published in - doi: [https://www.doi.org](Digital Object Identifier) - abstract: The abstract of the paper - categories: Categories / tags in the ArXiv system - versions: A version history You can access each paper directly on ArXiv using these links: - `https://arxiv.org/abs/{id}`: Page for this paper including its abstract and further links - `https://arxiv.org/pdf/{id}`: Direct link to download the PDF
librarian-bots/arxiv-metadata-snapshot
[ "task_categories:text-generation", "task_categories:text-classification", "size_categories:1M<n<10M", "language:en", "license:cc0-1.0", "arxiv", "science", "region:us" ]
2023-10-08T20:40:28+00:00
{"language": ["en"], "license": "cc0-1.0", "size_categories": ["1M<n<10M"], "task_categories": ["text-generation", "text-classification"], "pretty_name": "arXiv Metadata Dataset", "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "submitter", "dtype": "string"}, {"name": "authors", "dtype": "string"}, {"name": "title", "dtype": "string"}, {"name": "comments", "dtype": "string"}, {"name": "journal-ref", "dtype": "string"}, {"name": "doi", "dtype": "string"}, {"name": "report-no", "dtype": "string"}, {"name": "categories", "dtype": "string"}, {"name": "license", "dtype": "string"}, {"name": "abstract", "dtype": "string"}, {"name": "versions", "list": [{"name": "version", "dtype": "string"}, {"name": "created", "dtype": "string"}]}, {"name": "update_date", "dtype": "timestamp[s]"}, {"name": "authors_parsed", "sequence": {"sequence": "string"}}], "splits": [{"name": "train", "num_bytes": 3625651814.0, "num_examples": 2417693}], "download_size": 2029769171, "dataset_size": 3625651814.0}, "tags": ["arxiv", "science"]}
2024-02-12T06:14:00+00:00
[]
[ "en" ]
TAGS #task_categories-text-generation #task_categories-text-classification #size_categories-1M<n<10M #language-English #license-cc0-1.0 #arxiv #science #region-us
# Dataset Card for "arxiv-metadata-oai-snapshot" More Information needed This is a mirror of the metadata portion of the arXiv dataset. The sync will take place weekly so may fall behind the original datasets slightly if there are more regular updates to the source dataset. ## Metadata This dataset is a mirror of the original ArXiv data. This dataset contains an entry for each paper, containing: - id: ArXiv ID (can be used to access the paper, see below) - submitter: Who submitted the paper - authors: Authors of the paper - title: Title of the paper - comments: Additional info, such as number of pages and figures - journal-ref: Information about the journal the paper was published in - doi: URL - abstract: The abstract of the paper - categories: Categories / tags in the ArXiv system - versions: A version history You can access each paper directly on ArXiv using these links: - 'URL Page for this paper including its abstract and further links - 'URL Direct link to download the PDF
[ "# Dataset Card for \"arxiv-metadata-oai-snapshot\"\n\nMore Information needed\n\nThis is a mirror of the metadata portion of the arXiv dataset. \n\nThe sync will take place weekly so may fall behind the original datasets slightly if there are more regular updates to the source dataset.", "## Metadata\n\nThis dataset is a mirror of the original ArXiv data. This dataset contains an entry for each paper, containing:\n\n- id: ArXiv ID (can be used to access the paper, see below)\n- submitter: Who submitted the paper\n- authors: Authors of the paper\n- title: Title of the paper\n- comments: Additional info, such as number of pages and figures\n- journal-ref: Information about the journal the paper was published in\n- doi: URL\n- abstract: The abstract of the paper\n- categories: Categories / tags in the ArXiv system\n- versions: A version history\n\nYou can access each paper directly on ArXiv using these links:\n\n- 'URL Page for this paper including its abstract and further links\n- 'URL Direct link to download the PDF" ]
[ "TAGS\n#task_categories-text-generation #task_categories-text-classification #size_categories-1M<n<10M #language-English #license-cc0-1.0 #arxiv #science #region-us \n", "# Dataset Card for \"arxiv-metadata-oai-snapshot\"\n\nMore Information needed\n\nThis is a mirror of the metadata portion of the arXiv dataset. \n\nThe sync will take place weekly so may fall behind the original datasets slightly if there are more regular updates to the source dataset.", "## Metadata\n\nThis dataset is a mirror of the original ArXiv data. This dataset contains an entry for each paper, containing:\n\n- id: ArXiv ID (can be used to access the paper, see below)\n- submitter: Who submitted the paper\n- authors: Authors of the paper\n- title: Title of the paper\n- comments: Additional info, such as number of pages and figures\n- journal-ref: Information about the journal the paper was published in\n- doi: URL\n- abstract: The abstract of the paper\n- categories: Categories / tags in the ArXiv system\n- versions: A version history\n\nYou can access each paper directly on ArXiv using these links:\n\n- 'URL Page for this paper including its abstract and further links\n- 'URL Direct link to download the PDF" ]
[ 58, 70, 173 ]
[ "passage: TAGS\n#task_categories-text-generation #task_categories-text-classification #size_categories-1M<n<10M #language-English #license-cc0-1.0 #arxiv #science #region-us \n# Dataset Card for \"arxiv-metadata-oai-snapshot\"\n\nMore Information needed\n\nThis is a mirror of the metadata portion of the arXiv dataset. \n\nThe sync will take place weekly so may fall behind the original datasets slightly if there are more regular updates to the source dataset.## Metadata\n\nThis dataset is a mirror of the original ArXiv data. This dataset contains an entry for each paper, containing:\n\n- id: ArXiv ID (can be used to access the paper, see below)\n- submitter: Who submitted the paper\n- authors: Authors of the paper\n- title: Title of the paper\n- comments: Additional info, such as number of pages and figures\n- journal-ref: Information about the journal the paper was published in\n- doi: URL\n- abstract: The abstract of the paper\n- categories: Categories / tags in the ArXiv system\n- versions: A version history\n\nYou can access each paper directly on ArXiv using these links:\n\n- 'URL Page for this paper including its abstract and further links\n- 'URL Direct link to download the PDF" ]
368d96e4a146c69aa740d39e670dfcc8d2ff1a8c
# Dataset Card for Evaluation run of uukuguy/speechless-codellama-34b-v2.0 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/uukuguy/speechless-codellama-34b-v2.0 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [uukuguy/speechless-codellama-34b-v2.0](https://huggingface.co/uukuguy/speechless-codellama-34b-v2.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_uukuguy__speechless-codellama-34b-v2.0", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-23T15:35:47.826162](https://huggingface.co/datasets/open-llm-leaderboard/details_uukuguy__speechless-codellama-34b-v2.0/blob/main/results_2023-10-23T15-35-47.826162.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and the "latest" split for each eval): ```python { "all": { "em": 0.3704907718120805, "em_stderr": 0.004945718565106882, "f1": 0.4170574664429539, "f1_stderr": 0.004815998685057963, "acc": 0.42579643160821773, "acc_stderr": 0.010607605194213141 }, "harness|drop|3": { "em": 0.3704907718120805, "em_stderr": 0.004945718565106882, "f1": 0.4170574664429539, "f1_stderr": 0.004815998685057963 }, "harness|gsm8k|5": { "acc": 0.11599696739954511, "acc_stderr": 0.008820485491442485 }, "harness|winogrande|5": { "acc": 0.7355958958168903, "acc_stderr": 0.012394724896983799 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
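Beyond the per-task example in the card, the aggregated metrics mentioned in the summary can be read the same way; a small sketch, assuming the "results" configuration exposes a "latest" split in the same pattern as the task configurations:

```python
from datasets import load_dataset

# The "results" configuration stores the aggregated results of each run;
# "latest" is assumed to point at the most recent evaluation, per the card.
results = load_dataset(
    "open-llm-leaderboard/details_uukuguy__speechless-codellama-34b-v2.0",
    "results",
    split="latest",
)
print(results[0])
```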
open-llm-leaderboard/details_uukuguy__speechless-codellama-34b-v2.0
[ "region:us" ]
2023-10-08T20:56:02+00:00
{"pretty_name": "Evaluation run of uukuguy/speechless-codellama-34b-v2.0", "dataset_summary": "Dataset automatically created during the evaluation run of model [uukuguy/speechless-codellama-34b-v2.0](https://huggingface.co/uukuguy/speechless-codellama-34b-v2.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_uukuguy__speechless-codellama-34b-v2.0\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-23T15:35:47.826162](https://huggingface.co/datasets/open-llm-leaderboard/details_uukuguy__speechless-codellama-34b-v2.0/blob/main/results_2023-10-23T15-35-47.826162.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.3704907718120805,\n \"em_stderr\": 0.004945718565106882,\n \"f1\": 0.4170574664429539,\n \"f1_stderr\": 0.004815998685057963,\n \"acc\": 0.42579643160821773,\n \"acc_stderr\": 0.010607605194213141\n },\n \"harness|drop|3\": {\n \"em\": 0.3704907718120805,\n \"em_stderr\": 0.004945718565106882,\n \"f1\": 0.4170574664429539,\n \"f1_stderr\": 0.004815998685057963\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.11599696739954511,\n \"acc_stderr\": 0.008820485491442485\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7355958958168903,\n \"acc_stderr\": 0.012394724896983799\n }\n}\n```", "repo_url": "https://huggingface.co/uukuguy/speechless-codellama-34b-v2.0", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|arc:challenge|25_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_23T15_35_47.826162", "path": ["**/details_harness|drop|3_2023-10-23T15-35-47.826162.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-23T15-35-47.826162.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_23T15_35_47.826162", "path": ["**/details_harness|gsm8k|5_2023-10-23T15-35-47.826162.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-23T15-35-47.826162.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hellaswag|10_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-08T21-55-38.209151.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T21-55-38.209151.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T21-55-38.209151.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T21-55-38.209151.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T21-55-38.209151.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T21-55-38.209151.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_23T15_35_47.826162", "path": ["**/details_harness|winogrande|5_2023-10-23T15-35-47.826162.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-23T15-35-47.826162.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_08T21_55_38.209151", "path": ["results_2023-10-08T21-55-38.209151.parquet"]}, {"split": "2023_10_23T15_35_47.826162", "path": ["results_2023-10-23T15-35-47.826162.parquet"]}, {"split": "latest", "path": ["results_2023-10-23T15-35-47.826162.parquet"]}]}]}
2023-10-23T14:36:00+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of uukuguy/speechless-codellama-34b-v2.0 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model uukuguy/speechless-codellama-34b-v2.0 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the loading sketch after this card): ## Latest results These are the latest results from run 2023-10-23T15:35:47.826162 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
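The loading snippet referred to above was stripped when this card was flattened. A minimal sketch: the dataset id is an assumption inferred from the `open-llm-leaderboard/details_<org>__<model>` naming pattern used by the other detail repos in this collection (this record does not spell the id out itself), while `harness_winogrande_5` and the `latest` split are taken from this record's config metadata.

```python
from datasets import load_dataset

# Dataset id is an assumption inferred from the detail-repo naming pattern
# seen elsewhere in this collection; it is not stated in this record itself.
# "harness_winogrande_5" and the "latest" split are listed in this record's
# config metadata.
data = load_dataset(
    "open-llm-leaderboard/details_uukuguy__speechless-codellama-34b-v2.0",
    "harness_winogrande_5",
    split="latest",
)
```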
[ "# Dataset Card for Evaluation run of uukuguy/speechless-codellama-34b-v2.0", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model uukuguy/speechless-codellama-34b-v2.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T15:35:47.826162(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of uukuguy/speechless-codellama-34b-v2.0", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model uukuguy/speechless-codellama-34b-v2.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T15:35:47.826162(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 26, 31, 174, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of uukuguy/speechless-codellama-34b-v2.0## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model uukuguy/speechless-codellama-34b-v2.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-23T15:35:47.826162(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
146445c6acb44444aa5dd4ee6c6f69fe82d2b525
# Dataset Card for "DDI2013_test" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
hippocrates/DDI2013_test
[ "region:us" ]
2023-10-08T21:20:53+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "valid", "path": "data/valid-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "query", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "choices", "sequence": "string"}, {"name": "gold", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 20658927, "num_examples": 18779}, {"name": "valid", "num_bytes": 8739656, "num_examples": 7244}, {"name": "test", "num_bytes": 6455758, "num_examples": 5761}], "download_size": 3113073, "dataset_size": 35854341}}
2023-10-12T18:21:33+00:00
[]
[]
TAGS #region-us
# Dataset Card for "DDI2013_test" More Information needed
[ "# Dataset Card for \"DDI2013_test\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"DDI2013_test\"\n\nMore Information needed" ]
[ 6, 15 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"DDI2013_test\"\n\nMore Information needed" ]
2c79f020b18a8af049d4c0f165cfb149bd0b5297
# Dataset Card for "25b005b7" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
result-kand2-sdxl-wuerst-karlo/25b005b7
[ "region:us" ]
2023-10-08T21:30:23+00:00
{"dataset_info": {"features": [{"name": "result", "dtype": "string"}, {"name": "id", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 198, "num_examples": 10}], "download_size": 1383, "dataset_size": 198}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-08T21:30:24+00:00
[]
[]
TAGS #region-us
# Dataset Card for "25b005b7" More Information needed
[ "# Dataset Card for \"25b005b7\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"25b005b7\"\n\nMore Information needed" ]
[ 6, 15 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"25b005b7\"\n\nMore Information needed" ]
eeaecf963afa297c265d0810c95579f60dcb5e0b
# Dataset Card for "3658ecd8" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
result-kand2-sdxl-wuerst-karlo/3658ecd8
[ "region:us" ]
2023-10-08T21:30:26+00:00
{"dataset_info": {"features": [{"name": "result", "dtype": "string"}, {"name": "id", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 198, "num_examples": 10}], "download_size": 1383, "dataset_size": 198}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-08T21:30:26+00:00
[]
[]
TAGS #region-us
# Dataset Card for "3658ecd8" More Information needed
[ "# Dataset Card for \"3658ecd8\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"3658ecd8\"\n\nMore Information needed" ]
[ 6, 15 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"3658ecd8\"\n\nMore Information needed" ]
44b71e5964ee35a9858b318ce227034e435942d6
# Dataset Card for Evaluation run of Aeala/Alpaca-elina-65b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/Aeala/Alpaca-elina-65b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [Aeala/Alpaca-elina-65b](https://huggingface.co/Aeala/Alpaca-elina-65b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 3 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Aeala__Alpaca-elina-65b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-08T22:34:28.379829](https://huggingface.co/datasets/open-llm-leaderboard/details_Aeala__Alpaca-elina-65b/blob/main/results_2023-10-08T22-34-28.379829.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.27736996644295303, "em_stderr": 0.004584873651869028, "f1": 0.33694211409395997, "f1_stderr": 0.004497646539610947, "acc": 0.5520523608267965, "acc_stderr": 0.011722735218747584 }, "harness|drop|3": { "em": 0.27736996644295303, "em_stderr": 0.004584873651869028, "f1": 0.33694211409395997, "f1_stderr": 0.004497646539610947 }, "harness|gsm8k|5": { "acc": 0.29037149355572406, "acc_stderr": 0.012503592481818955 }, "harness|winogrande|5": { "acc": 0.813733228097869, "acc_stderr": 0.010941877955676211 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_Aeala__Alpaca-elina-65b
[ "region:us" ]
2023-10-08T21:34:32+00:00
{"pretty_name": "Evaluation run of Aeala/Alpaca-elina-65b", "dataset_summary": "Dataset automatically created during the evaluation run of model [Aeala/Alpaca-elina-65b](https://huggingface.co/Aeala/Alpaca-elina-65b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Aeala__Alpaca-elina-65b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-08T22:34:28.379829](https://huggingface.co/datasets/open-llm-leaderboard/details_Aeala__Alpaca-elina-65b/blob/main/results_2023-10-08T22-34-28.379829.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.27736996644295303,\n \"em_stderr\": 0.004584873651869028,\n \"f1\": 0.33694211409395997,\n \"f1_stderr\": 0.004497646539610947,\n \"acc\": 0.5520523608267965,\n \"acc_stderr\": 0.011722735218747584\n },\n \"harness|drop|3\": {\n \"em\": 0.27736996644295303,\n \"em_stderr\": 0.004584873651869028,\n \"f1\": 0.33694211409395997,\n \"f1_stderr\": 0.004497646539610947\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.29037149355572406,\n \"acc_stderr\": 0.012503592481818955\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.813733228097869,\n \"acc_stderr\": 0.010941877955676211\n }\n}\n```", "repo_url": "https://huggingface.co/Aeala/Alpaca-elina-65b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_08T22_34_28.379829", "path": ["**/details_harness|drop|3_2023-10-08T22-34-28.379829.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-08T22-34-28.379829.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_08T22_34_28.379829", "path": ["**/details_harness|gsm8k|5_2023-10-08T22-34-28.379829.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-08T22-34-28.379829.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_08T22_34_28.379829", "path": ["**/details_harness|winogrande|5_2023-10-08T22-34-28.379829.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-08T22-34-28.379829.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_08T22_34_28.379829", "path": ["results_2023-10-08T22-34-28.379829.parquet"]}, {"split": "latest", "path": ["results_2023-10-08T22-34-28.379829.parquet"]}]}]}
2023-10-08T21:34:40+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Aeala/Alpaca-elina-65b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model Aeala/Alpaca-elina-65b on the Open LLM Leaderboard. The dataset is composed of 3 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the snippet reproduced after this card): ## Latest results These are the latest results from run 2023-10-08T22:34:28.379829 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
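The loading snippet was stripped from this flattened copy; the full card earlier in this record gives it verbatim, and it is reproduced below for convenience. Note that this record's config metadata names its splits by run timestamp plus `latest`, so `split="latest"` may be the safer choice if the `"train"` alias is not materialized.

```python
from datasets import load_dataset

# Verbatim call from the full card for this record; per this record's config
# metadata, split="latest" can be substituted if "train" is not available.
data = load_dataset(
    "open-llm-leaderboard/details_Aeala__Alpaca-elina-65b",
    "harness_winogrande_5",
    split="train",
)
```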
[ "# Dataset Card for Evaluation run of Aeala/Alpaca-elina-65b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Aeala/Alpaca-elina-65b on the Open LLM Leaderboard.\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-08T22:34:28.379829(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Aeala/Alpaca-elina-65b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Aeala/Alpaca-elina-65b on the Open LLM Leaderboard.\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-08T22:34:28.379829(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 22, 31, 170, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Aeala/Alpaca-elina-65b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model Aeala/Alpaca-elina-65b on the Open LLM Leaderboard.\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-08T22:34:28.379829(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
62a27eaefd6c347335a0ae15618452f61527d3e7
# Dataset Card for "gpt4v-LAION-discord" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
EduardoPacheco/gpt4v-LAION-discord
[ "region:us" ]
2023-10-08T21:47:05+00:00
{"dataset_info": {"features": [{"name": "caption", "dtype": "string"}, {"name": "image", "dtype": "image"}, {"name": "link", "dtype": "string"}, {"name": "message_id", "dtype": "string"}, {"name": "timestamp", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 36014887.0, "num_examples": 136}], "download_size": 0, "dataset_size": 36014887.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-16T15:05:32+00:00
[]
[]
TAGS #region-us
# Dataset Card for "gpt4v-LAION-discord" More Information needed
[ "# Dataset Card for \"gpt4v-LAION-discord\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"gpt4v-LAION-discord\"\n\nMore Information needed" ]
[ 6, 20 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"gpt4v-LAION-discord\"\n\nMore Information needed" ]
cbdbe99039e89b5413441da00e0f69b56294e8b7
# Dataset Card for Evaluation run of u-chom/preded-title-amazongoogle-abtbuy ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/u-chom/preded-title-amazongoogle-abtbuy - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [u-chom/preded-title-amazongoogle-abtbuy](https://huggingface.co/u-chom/preded-title-amazongoogle-abtbuy) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_u-chom__preded-title-amazongoogle-abtbuy", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-10-08T23:27:01.372351](https://huggingface.co/datasets/open-llm-leaderboard/details_u-chom__preded-title-amazongoogle-abtbuy/blob/main/results_2023-10-08T23-27-01.372351.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks.
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.38488561065128146, "acc_stderr": 0.03460083230388379, "acc_norm": 0.38889827098513907, "acc_norm_stderr": 0.034587970268505575, "mc1": 0.2692778457772338, "mc1_stderr": 0.015528566637087281, "mc2": 0.4164930056701617, "mc2_stderr": 0.013916947335276144 }, "harness|arc:challenge|25": { "acc": 0.4667235494880546, "acc_stderr": 0.014578995859605808, "acc_norm": 0.5093856655290102, "acc_norm_stderr": 0.014608816322065 }, "harness|hellaswag|10": { "acc": 0.5873332005576578, "acc_stderr": 0.00491307684443376, "acc_norm": 0.7814180442143, "acc_norm_stderr": 0.004124396294659584 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4222222222222222, "acc_stderr": 0.042667634040995814, "acc_norm": 0.4222222222222222, "acc_norm_stderr": 0.042667634040995814 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.34868421052631576, "acc_stderr": 0.038781398887976104, "acc_norm": 0.34868421052631576, "acc_norm_stderr": 0.038781398887976104 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.3584905660377358, "acc_stderr": 0.02951470358398177, "acc_norm": 0.3584905660377358, "acc_norm_stderr": 0.02951470358398177 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.3888888888888889, "acc_stderr": 0.04076663253918567, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.04076663253918567 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.24, "acc_stderr": 0.042923469599092816, "acc_norm": 0.24, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.3236994219653179, "acc_stderr": 0.0356760379963917, "acc_norm": 0.3236994219653179, "acc_norm_stderr": 0.0356760379963917 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.20588235294117646, "acc_stderr": 0.04023382273617747, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.04023382273617747 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.53, "acc_stderr": 0.05016135580465919, "acc_norm": 0.53, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.3829787234042553, "acc_stderr": 0.03177821250236922, "acc_norm": 0.3829787234042553, "acc_norm_stderr": 0.03177821250236922 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.24561403508771928, "acc_stderr": 0.040493392977481425, "acc_norm": 0.24561403508771928, "acc_norm_stderr": 0.040493392977481425 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.3586206896551724, "acc_stderr": 0.03996629574876719, "acc_norm": 0.3586206896551724, "acc_norm_stderr": 0.03996629574876719 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.21164021164021163, "acc_stderr": 0.021037331505262883, "acc_norm": 0.21164021164021163, "acc_norm_stderr": 0.021037331505262883 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.31746031746031744, "acc_stderr": 
0.04163453031302859, "acc_norm": 0.31746031746031744, "acc_norm_stderr": 0.04163453031302859 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.35161290322580646, "acc_stderr": 0.027162537826948458, "acc_norm": 0.35161290322580646, "acc_norm_stderr": 0.027162537826948458 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.2413793103448276, "acc_stderr": 0.030108330718011625, "acc_norm": 0.2413793103448276, "acc_norm_stderr": 0.030108330718011625 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.4484848484848485, "acc_stderr": 0.038835659779569286, "acc_norm": 0.4484848484848485, "acc_norm_stderr": 0.038835659779569286 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.40404040404040403, "acc_stderr": 0.03496130972056127, "acc_norm": 0.40404040404040403, "acc_norm_stderr": 0.03496130972056127 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.5492227979274611, "acc_stderr": 0.03590910952235524, "acc_norm": 0.5492227979274611, "acc_norm_stderr": 0.03590910952235524 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.32564102564102565, "acc_stderr": 0.02375966576741229, "acc_norm": 0.32564102564102565, "acc_norm_stderr": 0.02375966576741229 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.21851851851851853, "acc_stderr": 0.02519575225182379, "acc_norm": 0.21851851851851853, "acc_norm_stderr": 0.02519575225182379 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.3445378151260504, "acc_stderr": 0.030868682604121626, "acc_norm": 0.3445378151260504, "acc_norm_stderr": 0.030868682604121626 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2251655629139073, "acc_stderr": 0.03410435282008937, "acc_norm": 0.2251655629139073, "acc_norm_stderr": 0.03410435282008937 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.43853211009174314, "acc_stderr": 0.021274713073954562, "acc_norm": 0.43853211009174314, "acc_norm_stderr": 0.021274713073954562 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.1712962962962963, "acc_stderr": 0.025695341643824685, "acc_norm": 0.1712962962962963, "acc_norm_stderr": 0.025695341643824685 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.4019607843137255, "acc_stderr": 0.034411900234824655, "acc_norm": 0.4019607843137255, "acc_norm_stderr": 0.034411900234824655 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.3881856540084388, "acc_stderr": 0.0317229500433233, "acc_norm": 0.3881856540084388, "acc_norm_stderr": 0.0317229500433233 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.4798206278026906, "acc_stderr": 0.033530461674123, "acc_norm": 0.4798206278026906, "acc_norm_stderr": 0.033530461674123 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.4198473282442748, "acc_stderr": 0.04328577215262972, "acc_norm": 0.4198473282442748, "acc_norm_stderr": 0.04328577215262972 }, "harness|hendrycksTest-international_law|5": { "acc": 0.512396694214876, "acc_stderr": 0.045629515481807666, "acc_norm": 0.512396694214876, "acc_norm_stderr": 0.045629515481807666 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.4444444444444444, "acc_stderr": 0.04803752235190193, 
"acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.04803752235190193 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.4049079754601227, "acc_stderr": 0.038566721635489125, "acc_norm": 0.4049079754601227, "acc_norm_stderr": 0.038566721635489125 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3482142857142857, "acc_stderr": 0.04521829902833586, "acc_norm": 0.3482142857142857, "acc_norm_stderr": 0.04521829902833586 }, "harness|hendrycksTest-management|5": { "acc": 0.4077669902912621, "acc_stderr": 0.048657775704107675, "acc_norm": 0.4077669902912621, "acc_norm_stderr": 0.048657775704107675 }, "harness|hendrycksTest-marketing|5": { "acc": 0.6367521367521367, "acc_stderr": 0.03150712523091264, "acc_norm": 0.6367521367521367, "acc_norm_stderr": 0.03150712523091264 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.5491698595146871, "acc_stderr": 0.017793297572699034, "acc_norm": 0.5491698595146871, "acc_norm_stderr": 0.017793297572699034 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.42196531791907516, "acc_stderr": 0.02658923114217426, "acc_norm": 0.42196531791907516, "acc_norm_stderr": 0.02658923114217426 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23798882681564246, "acc_stderr": 0.014242630070574915, "acc_norm": 0.23798882681564246, "acc_norm_stderr": 0.014242630070574915 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.40522875816993464, "acc_stderr": 0.02811092849280907, "acc_norm": 0.40522875816993464, "acc_norm_stderr": 0.02811092849280907 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.5080385852090032, "acc_stderr": 0.028394421370984538, "acc_norm": 0.5080385852090032, "acc_norm_stderr": 0.028394421370984538 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.43209876543209874, "acc_stderr": 0.027563010971606676, "acc_norm": 0.43209876543209874, "acc_norm_stderr": 0.027563010971606676 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.30141843971631205, "acc_stderr": 0.02737412888263115, "acc_norm": 0.30141843971631205, "acc_norm_stderr": 0.02737412888263115 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.33572359843546284, "acc_stderr": 0.012061304157664607, "acc_norm": 0.33572359843546284, "acc_norm_stderr": 0.012061304157664607 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.24632352941176472, "acc_stderr": 0.02617343857052, "acc_norm": 0.24632352941176472, "acc_norm_stderr": 0.02617343857052 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.4035947712418301, "acc_stderr": 0.019848280168401154, "acc_norm": 0.4035947712418301, "acc_norm_stderr": 0.019848280168401154 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.4636363636363636, "acc_stderr": 0.047764491623961985, "acc_norm": 0.4636363636363636, "acc_norm_stderr": 0.047764491623961985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.2653061224489796, "acc_stderr": 0.028263889943784606, "acc_norm": 0.2653061224489796, "acc_norm_stderr": 0.028263889943784606 }, "harness|hendrycksTest-sociology|5": { "acc": 0.5572139303482587, "acc_stderr": 0.03512310964123937, "acc_norm": 0.5572139303482587, "acc_norm_stderr": 0.03512310964123937 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-virology|5": { "acc": 0.3855421686746988, "acc_stderr": 0.037891344246115496, "acc_norm": 0.3855421686746988, 
"acc_norm_stderr": 0.037891344246115496 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.6140350877192983, "acc_stderr": 0.03733756969066165, "acc_norm": 0.6140350877192983, "acc_norm_stderr": 0.03733756969066165 }, "harness|truthfulqa:mc|0": { "mc1": 0.2692778457772338, "mc1_stderr": 0.015528566637087281, "mc2": 0.4164930056701617, "mc2_stderr": 0.013916947335276144 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_u-chom__preded-title-amazongoogle-abtbuy
[ "region:us" ]
2023-10-08T22:27:25+00:00
{"pretty_name": "Evaluation run of u-chom/preded-title-amazongoogle-abtbuy", "dataset_summary": "Dataset automatically created during the evaluation run of model [u-chom/preded-title-amazongoogle-abtbuy](https://huggingface.co/u-chom/preded-title-amazongoogle-abtbuy) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_u-chom__preded-title-amazongoogle-abtbuy\",\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-08T23:27:01.372351](https://huggingface.co/datasets/open-llm-leaderboard/details_u-chom__preded-title-amazongoogle-abtbuy/blob/main/results_2023-10-08T23-27-01.372351.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.38488561065128146,\n \"acc_stderr\": 0.03460083230388379,\n \"acc_norm\": 0.38889827098513907,\n \"acc_norm_stderr\": 0.034587970268505575,\n \"mc1\": 0.2692778457772338,\n \"mc1_stderr\": 0.015528566637087281,\n \"mc2\": 0.4164930056701617,\n \"mc2_stderr\": 0.013916947335276144\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.4667235494880546,\n \"acc_stderr\": 0.014578995859605808,\n \"acc_norm\": 0.5093856655290102,\n \"acc_norm_stderr\": 0.014608816322065\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5873332005576578,\n \"acc_stderr\": 0.00491307684443376,\n \"acc_norm\": 0.7814180442143,\n \"acc_norm_stderr\": 0.004124396294659584\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4222222222222222,\n \"acc_stderr\": 0.042667634040995814,\n \"acc_norm\": 0.4222222222222222,\n \"acc_norm_stderr\": 0.042667634040995814\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.34868421052631576,\n \"acc_stderr\": 0.038781398887976104,\n \"acc_norm\": 0.34868421052631576,\n \"acc_norm_stderr\": 0.038781398887976104\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.3584905660377358,\n \"acc_stderr\": 0.02951470358398177,\n \"acc_norm\": 0.3584905660377358,\n \"acc_norm_stderr\": 0.02951470358398177\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.3888888888888889,\n \"acc_stderr\": 0.04076663253918567,\n \"acc_norm\": 0.3888888888888889,\n \"acc_norm_stderr\": 0.04076663253918567\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 
0.24,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.3236994219653179,\n \"acc_stderr\": 0.0356760379963917,\n \"acc_norm\": 0.3236994219653179,\n \"acc_norm_stderr\": 0.0356760379963917\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.20588235294117646,\n \"acc_stderr\": 0.04023382273617747,\n \"acc_norm\": 0.20588235294117646,\n \"acc_norm_stderr\": 0.04023382273617747\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.3829787234042553,\n \"acc_stderr\": 0.03177821250236922,\n \"acc_norm\": 0.3829787234042553,\n \"acc_norm_stderr\": 0.03177821250236922\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.24561403508771928,\n \"acc_stderr\": 0.040493392977481425,\n \"acc_norm\": 0.24561403508771928,\n \"acc_norm_stderr\": 0.040493392977481425\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.3586206896551724,\n \"acc_stderr\": 0.03996629574876719,\n \"acc_norm\": 0.3586206896551724,\n \"acc_norm_stderr\": 0.03996629574876719\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.21164021164021163,\n \"acc_stderr\": 0.021037331505262883,\n \"acc_norm\": 0.21164021164021163,\n \"acc_norm_stderr\": 0.021037331505262883\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.31746031746031744,\n \"acc_stderr\": 0.04163453031302859,\n \"acc_norm\": 0.31746031746031744,\n \"acc_norm_stderr\": 0.04163453031302859\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.35161290322580646,\n \"acc_stderr\": 0.027162537826948458,\n \"acc_norm\": 0.35161290322580646,\n \"acc_norm_stderr\": 0.027162537826948458\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.2413793103448276,\n \"acc_stderr\": 0.030108330718011625,\n \"acc_norm\": 0.2413793103448276,\n \"acc_norm_stderr\": 0.030108330718011625\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.4484848484848485,\n \"acc_stderr\": 0.038835659779569286,\n \"acc_norm\": 0.4484848484848485,\n \"acc_norm_stderr\": 0.038835659779569286\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.40404040404040403,\n \"acc_stderr\": 0.03496130972056127,\n \"acc_norm\": 0.40404040404040403,\n \"acc_norm_stderr\": 0.03496130972056127\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.5492227979274611,\n \"acc_stderr\": 0.03590910952235524,\n \"acc_norm\": 0.5492227979274611,\n \"acc_norm_stderr\": 0.03590910952235524\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.32564102564102565,\n \"acc_stderr\": 0.02375966576741229,\n \"acc_norm\": 0.32564102564102565,\n \"acc_norm_stderr\": 0.02375966576741229\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.21851851851851853,\n \"acc_stderr\": 0.02519575225182379,\n \"acc_norm\": 0.21851851851851853,\n \"acc_norm_stderr\": 0.02519575225182379\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.3445378151260504,\n \"acc_stderr\": 0.030868682604121626,\n \"acc_norm\": 0.3445378151260504,\n \"acc_norm_stderr\": 0.030868682604121626\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2251655629139073,\n \"acc_stderr\": 0.03410435282008937,\n \"acc_norm\": 0.2251655629139073,\n \"acc_norm_stderr\": 0.03410435282008937\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.43853211009174314,\n \"acc_stderr\": 0.021274713073954562,\n \"acc_norm\": 0.43853211009174314,\n \"acc_norm_stderr\": 0.021274713073954562\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.1712962962962963,\n \"acc_stderr\": 0.025695341643824685,\n \"acc_norm\": 0.1712962962962963,\n \"acc_norm_stderr\": 0.025695341643824685\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.4019607843137255,\n \"acc_stderr\": 0.034411900234824655,\n \"acc_norm\": 0.4019607843137255,\n \"acc_norm_stderr\": 0.034411900234824655\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.3881856540084388,\n \"acc_stderr\": 0.0317229500433233,\n \"acc_norm\": 0.3881856540084388,\n \"acc_norm_stderr\": 0.0317229500433233\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.4798206278026906,\n \"acc_stderr\": 0.033530461674123,\n \"acc_norm\": 0.4798206278026906,\n \"acc_norm_stderr\": 0.033530461674123\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.4198473282442748,\n \"acc_stderr\": 0.04328577215262972,\n \"acc_norm\": 0.4198473282442748,\n \"acc_norm_stderr\": 0.04328577215262972\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.512396694214876,\n \"acc_stderr\": 0.045629515481807666,\n \"acc_norm\": 0.512396694214876,\n \"acc_norm_stderr\": 0.045629515481807666\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.4444444444444444,\n \"acc_stderr\": 0.04803752235190193,\n \"acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.04803752235190193\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.4049079754601227,\n \"acc_stderr\": 0.038566721635489125,\n \"acc_norm\": 0.4049079754601227,\n \"acc_norm_stderr\": 0.038566721635489125\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3482142857142857,\n \"acc_stderr\": 0.04521829902833586,\n \"acc_norm\": 0.3482142857142857,\n \"acc_norm_stderr\": 0.04521829902833586\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.4077669902912621,\n \"acc_stderr\": 0.048657775704107675,\n \"acc_norm\": 0.4077669902912621,\n \"acc_norm_stderr\": 0.048657775704107675\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.6367521367521367,\n \"acc_stderr\": 0.03150712523091264,\n \"acc_norm\": 0.6367521367521367,\n \"acc_norm_stderr\": 0.03150712523091264\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.5491698595146871,\n \"acc_stderr\": 
0.017793297572699034,\n \"acc_norm\": 0.5491698595146871,\n \"acc_norm_stderr\": 0.017793297572699034\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.42196531791907516,\n \"acc_stderr\": 0.02658923114217426,\n \"acc_norm\": 0.42196531791907516,\n \"acc_norm_stderr\": 0.02658923114217426\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23798882681564246,\n \"acc_stderr\": 0.014242630070574915,\n \"acc_norm\": 0.23798882681564246,\n \"acc_norm_stderr\": 0.014242630070574915\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.40522875816993464,\n \"acc_stderr\": 0.02811092849280907,\n \"acc_norm\": 0.40522875816993464,\n \"acc_norm_stderr\": 0.02811092849280907\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.5080385852090032,\n \"acc_stderr\": 0.028394421370984538,\n \"acc_norm\": 0.5080385852090032,\n \"acc_norm_stderr\": 0.028394421370984538\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.43209876543209874,\n \"acc_stderr\": 0.027563010971606676,\n \"acc_norm\": 0.43209876543209874,\n \"acc_norm_stderr\": 0.027563010971606676\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.30141843971631205,\n \"acc_stderr\": 0.02737412888263115,\n \"acc_norm\": 0.30141843971631205,\n \"acc_norm_stderr\": 0.02737412888263115\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.33572359843546284,\n \"acc_stderr\": 0.012061304157664607,\n \"acc_norm\": 0.33572359843546284,\n \"acc_norm_stderr\": 0.012061304157664607\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.24632352941176472,\n \"acc_stderr\": 0.02617343857052,\n \"acc_norm\": 0.24632352941176472,\n \"acc_norm_stderr\": 0.02617343857052\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.4035947712418301,\n \"acc_stderr\": 0.019848280168401154,\n \"acc_norm\": 0.4035947712418301,\n \"acc_norm_stderr\": 0.019848280168401154\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.4636363636363636,\n \"acc_stderr\": 0.047764491623961985,\n \"acc_norm\": 0.4636363636363636,\n \"acc_norm_stderr\": 0.047764491623961985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.2653061224489796,\n \"acc_stderr\": 0.028263889943784606,\n \"acc_norm\": 0.2653061224489796,\n \"acc_norm_stderr\": 0.028263889943784606\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.5572139303482587,\n \"acc_stderr\": 0.03512310964123937,\n \"acc_norm\": 0.5572139303482587,\n \"acc_norm_stderr\": 0.03512310964123937\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3855421686746988,\n \"acc_stderr\": 0.037891344246115496,\n \"acc_norm\": 0.3855421686746988,\n \"acc_norm_stderr\": 0.037891344246115496\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.6140350877192983,\n \"acc_stderr\": 0.03733756969066165,\n \"acc_norm\": 0.6140350877192983,\n \"acc_norm_stderr\": 0.03733756969066165\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2692778457772338,\n \"mc1_stderr\": 0.015528566637087281,\n \"mc2\": 0.4164930056701617,\n \"mc2_stderr\": 0.013916947335276144\n }\n}\n```", "repo_url": "https://huggingface.co/u-chom/preded-title-amazongoogle-abtbuy", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": 
"harness_arc_challenge_25", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|arc:challenge|25_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hellaswag|10_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T23-27-01.372351.parquet", 
"**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T23-27-01.372351.parquet", 
"**/details_harness|hendrycksTest-econometrics|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T23-27-01.372351.parquet", 
"**/details_harness|hendrycksTest-public_relations|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-08T23-27-01.372351.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-08T23-27-01.372351.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_08T23_27_01.372351", "path": ["results_2023-10-08T23-27-01.372351.parquet"]}, {"split": "latest", "path": ["results_2023-10-08T23-27-01.372351.parquet"]}]}]}
2023-10-08T22:28:24+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of u-chom/preded-title-amazongoogle-abtbuy ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model u-chom/preded-title-amazongoogle-abtbuy on the Open LLM Leaderboard. The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-08T23:27:01.372351 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and in the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
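For instance, using the `datasets` library (this is the exact snippet from the card's metadata; `harness_truthfulqa_mc_0` stands in for any of the listed configurations):

```python
from datasets import load_dataset

# Any config name from the card (e.g. "harness_arc_challenge_25") works here.
data = load_dataset("open-llm-leaderboard/details_u-chom__preded-title-amazongoogle-abtbuy",
                    "harness_truthfulqa_mc_0",
                    split="train")
```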
[ "# Dataset Card for Evaluation run of u-chom/preded-title-amazongoogle-abtbuy", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model u-chom/preded-title-amazongoogle-abtbuy on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-08T23:27:01.372351(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of u-chom/preded-title-amazongoogle-abtbuy", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model u-chom/preded-title-amazongoogle-abtbuy on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-08T23:27:01.372351(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 26, 31, 174, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of u-chom/preded-title-amazongoogle-abtbuy## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model u-chom/preded-title-amazongoogle-abtbuy on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-08T23:27:01.372351(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
bcbc246b31d6f13a64c6fd6972211e0843054da2
# Dataset Card for "storycommonsense-chatml" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
julep-ai-archive/storycommonsense-chatml
[ "region:us" ]
2023-10-08T22:31:35+00:00
{"dataset_info": {"features": [{"name": "chatml", "list": [{"name": "content", "dtype": "string"}, {"name": "name", "dtype": "string"}, {"name": "role", "dtype": "string"}]}], "splits": [{"name": "train", "num_bytes": 10730524, "num_examples": 4827}], "download_size": 2141314, "dataset_size": 10730524}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-27T04:20:08+00:00
[]
[]
TAGS #region-us
# Dataset Card for "storycommonsense-chatml" More Information needed
[ "# Dataset Card for \"storycommonsense-chatml\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"storycommonsense-chatml\"\n\nMore Information needed" ]
[ 6, 17 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"storycommonsense-chatml\"\n\nMore Information needed" ]
dd692fcb6e84d462fd2fd4b3144a60fe56700e4f
# Dataset Card for OCFBench [[Paper]](https://arxiv.org/abs/2310.11239) [[Code]](https://github.com/ai4ce/Occ4cast/) [[Website]](https://ai4ce.github.io/Occ4cast/) The OCFBench dataset is curated in the paper [**Occ4cast: LiDAR-based 4D Occupancy Completion and Forecasting**](https://arxiv.org/abs/2310.11239). The dataset is processed from public autonomous driving data to support the training and evaluation of the novel **occupancy completion and forecasting (OCF)** task. # Uses Please download each `.sqf` file from the individual directories and mount it on your local system for use. For larger files that are split into several parts, please run the following command to merge the parts before mounting: ``` cat output_prefix_* > merged.sqf ``` Please refer to our [GitHub repository](https://github.com/ai4ce/Occ4cast/) for dataset structure and loading details. ## Citation **BibTeX:** ``` @article{Liu2023occ4cast, title={LiDAR-based 4D Occupancy Completion and Forecasting}, author={Xinhao Liu and Moonjun Gong and Qi Fang and Haoyu Xie and Yiming Li and Hang Zhao and Chen Feng}, journal={arXiv preprint arXiv:2310.11239}, year={2023} } ```
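A minimal sketch of the merge-and-mount step on Linux, assuming squashfs support in the kernel; the mount point `/mnt/ocfbench` is a hypothetical choice:

```
# Merge the split parts first (for files distributed in several pieces).
cat output_prefix_* > merged.sqf

# Loop-mount the squashfs image read-only; squashfuse is a rootless alternative.
sudo mkdir -p /mnt/ocfbench
sudo mount -t squashfs -o loop,ro merged.sqf /mnt/ocfbench

# ... read the dataset from /mnt/ocfbench, then detach:
sudo umount /mnt/ocfbench
```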
ai4ce/OCFBench
[ "size_categories:10K<n<100K", "language:en", "license:cc-by-nc-sa-4.0", "arxiv:2310.11239", "region:us" ]
2023-10-08T22:32:22+00:00
{"language": ["en"], "license": "cc-by-nc-sa-4.0", "size_categories": ["10K<n<100K"], "pretty_name": "OCFBench"}
2023-11-02T15:12:13+00:00
[ "2310.11239" ]
[ "en" ]
TAGS #size_categories-10K<n<100K #language-English #license-cc-by-nc-sa-4.0 #arxiv-2310.11239 #region-us
# Dataset Card for OCFBench [[Paper]](URL [[Code]](URL [[Website]](URL The OCFBench dataset is curated in the paper Occ4cast: LiDAR-based 4D Occupancy Completion and Forecasting. The dataset is processed from public autonomous driving data to support the training and evaluation of the novel occupancy completion and forecasting (OCF) task. # Uses Please download each '.sqf' file from the individual directories and mount it on your local system for use. For larger files that are split into several parts, please run the following command to merge the parts before mounting: Please refer to our GitHub repository for dataset structure and loading details. BibTeX:
[ "# Dataset Card for OCFBench\n[[Paper]](URL\n[[Code]](URL\n[[Website]](URL\n\n\n\nThe OCFBench dataset is curated in the paper Occ4cast: LiDAR-based 4D Occupancy Completion and Forecasting.\nThe dataset is processed from public autonomous driving data to support the training and evaluation of the novel occupancy completion and forecasting (OCF) task.", "# Uses\n\nPlease download each '.sqf' file from individual directories and mount them to local system for usage.\n\nFor larger files that are splited into several parts, please run the following code to merge the parts before mounting:\n\n\n\nPlease refer to our GitHub repository for dataset structure and loading details.\n\nBibTeX:" ]
[ "TAGS\n#size_categories-10K<n<100K #language-English #license-cc-by-nc-sa-4.0 #arxiv-2310.11239 #region-us \n", "# Dataset Card for OCFBench\n[[Paper]](URL\n[[Code]](URL\n[[Website]](URL\n\n\n\nThe OCFBench dataset is curated in the paper Occ4cast: LiDAR-based 4D Occupancy Completion and Forecasting.\nThe dataset is processed from public autonomous driving data to support the training and evaluation of the novel occupancy completion and forecasting (OCF) task.", "# Uses\n\nPlease download each '.sqf' file from individual directories and mount them to local system for usage.\n\nFor larger files that are splited into several parts, please run the following code to merge the parts before mounting:\n\n\n\nPlease refer to our GitHub repository for dataset structure and loading details.\n\nBibTeX:" ]
[ 44, 95, 75 ]
[ "passage: TAGS\n#size_categories-10K<n<100K #language-English #license-cc-by-nc-sa-4.0 #arxiv-2310.11239 #region-us \n# Dataset Card for OCFBench\n[[Paper]](URL\n[[Code]](URL\n[[Website]](URL\n\n\n\nThe OCFBench dataset is curated in the paper Occ4cast: LiDAR-based 4D Occupancy Completion and Forecasting.\nThe dataset is processed from public autonomous driving data to support the training and evaluation of the novel occupancy completion and forecasting (OCF) task.# Uses\n\nPlease download each '.sqf' file from individual directories and mount them to local system for usage.\n\nFor larger files that are splited into several parts, please run the following code to merge the parts before mounting:\n\n\n\nPlease refer to our GitHub repository for dataset structure and loading details.\n\nBibTeX:" ]
50c2f122ae0df9f174eb33eeb8278c092cc9a692
# DataBack: Dataset of SAT Formulas and Backbone Variable Phases ## What is DataBack `DataBack` is a dataset that consists of 120,286 SAT formulas (in CNF format), each labeled with the phases of its backbone variables. `DataBack` contains two distinct subsets: the pre-training set, named `DataBack-PT`, and the fine-tuning set, named `DataBack-FT`, for pre-training and fine-tuning our `NeuroBack` model, respectively. To learn more about `NeuroBack` and `DataBack`, please refer to our [`NeuroBack paper`](https://arxiv.org/pdf/2110.14053.pdf). The state-of-the-art backbone extractor, [`CadiBack`](https://github.com/arminbiere/cadiback), has been employed to extract the backbone variable phases. To learn more about `CadiBack`, please refer to the [`CadiBack paper`](https://wenxiwang.github.io/papers/cadiback.pdf). ## Directory Structure ``` |- original # Original CNF formulas and their backbone variable phases | |- cnf_pt.tar.gz # CNF formulas for pre-training | |- bb_pt.tar.gz # Backbone phases for pre-training formulas | |- cnf_ft.tar.gz # CNF formulas for fine-tuning | |- bb_ft.tar.gz # Backbone phases for fine-tuning formulas | |- dual # Dual CNF formulas and their backbone variable phases | |- d_cnf_pt.tar.gz # Dual CNF formulas for pre-training | |- d_bb_pt.tar.gz # Backbone phases for dual pre-training formulas | |- d_cnf_ft.tar.gz # Dual CNF formulas for fine-tuning | |- d_bb_ft.tar.gz # Backbone phases for dual fine-tuning formulas ``` ## File Naming Convention In the original directory, each CNF tar file (**`cnf_*.tar.gz`**) contains compressed CNF files named: **`[cnf_name].[compression_format]`**, where **`[compression_format]`** could be bz2, lzma, xz, gz, etc. Correspondingly, each backbone tar file (**`bb_*.tar.gz`**) comprises compressed backbone files named: **`[cnf_name].backbone.xz`**. It is important to note that a compressed CNF file will always share its **`[cnf_name]`** with its associated compressed backbone file. For dual formulas and their corresponding backbone files, the naming convention remains consistent, but with an added **`d_`** prefix. ## Format of the Extracted Backbone File The extracted backbone file (`*.backbone`) adheres to the output format of [`CadiBack`](https://github.com/arminbiere/cadiback). ## References If you use `DataBack` in your research, please kindly cite the following papers. [`NeuroBack paper`](https://arxiv.org/pdf/2110.14053.pdf): ```bib @article{wang2023neuroback, author = {Wang, Wenxi and Hu, Yang and Tiwari, Mohit and Khurshid, Sarfraz and McMillan, Kenneth L. and Miikkulainen, Risto}, title = {NeuroBack: Improving CDCL SAT Solving using Graph Neural Networks}, journal={arXiv preprint arXiv:2110.14053}, year={2021} } ``` [`CadiBack paper`](https://wenxiwang.github.io/papers/cadiback.pdf): ```bib @inproceedings{biere2023cadiback, title={CadiBack: Extracting Backbones with CaDiCaL}, author={Biere, Armin and Froleyks, Nils and Wang, Wenxi}, booktitle={26th International Conference on Theory and Applications of Satisfiability Testing (SAT 2023)}, year={2023}, organization={Schloss Dagstuhl-Leibniz-Zentrum f{\"u}r Informatik} } ``` ## Contributors Wenxi Wang ([email protected]), Yang Hu ([email protected])
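A minimal Python sketch for walking extracted `cnf_*`/`bb_*` directories and pairing each formula with its backbone file under the naming convention above; the directory names are hypothetical, and only the stdlib codecs for the listed compression formats are assumed:

```python
import bz2
import gzip
import lzma
from pathlib import Path

# Stdlib openers for the compression formats listed above.
OPENERS = {".bz2": bz2.open, ".gz": gzip.open, ".xz": lzma.open, ".lzma": lzma.open}

def read_compressed(path: Path) -> str:
    """Decompress one file, picking the codec from its extension."""
    return OPENERS[path.suffix](path, "rt").read()

def iter_pairs(cnf_dir: Path, bb_dir: Path):
    """Yield (cnf_text, backbone_text) for every [cnf_name] present in both dirs."""
    for cnf_path in sorted(cnf_dir.iterdir()):
        cnf_name = cnf_path.name[: -len(cnf_path.suffix)]  # strip the compression suffix
        bb_path = bb_dir / f"{cnf_name}.backbone.xz"       # per the naming convention
        if bb_path.exists():
            yield read_compressed(cnf_path), read_compressed(bb_path)

# Hypothetical directories obtained by extracting cnf_ft.tar.gz and bb_ft.tar.gz:
for cnf_text, backbone_text in iter_pairs(Path("cnf_ft"), Path("bb_ft")):
    pass  # feed each (formula, backbone) pair into your pipeline
```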
neuroback/DataBack
[ "arxiv:2110.14053", "region:us" ]
2023-10-08T22:35:37+00:00
{}
2024-01-28T20:47:08+00:00
[ "2110.14053" ]
[]
TAGS #arxiv-2110.14053 #region-us
# DataBack: Dataset of SAT Formulas and Backbone Variable Phases ## What is DataBack 'DataBack' is a dataset that consists of 120,286 SAT formulas (in CNF format), each labeled with the phases of its backbone variables. 'DataBack' contains two distinct subsets: the pre-training set, named 'DataBack-PT', and the fine-tuning set, named 'DataBack-FT', for pre-training and fine-tuning our 'NeuroBack' model, respectively. To learn more about 'NeuroBack' and 'DataBack', please refer to our 'NeuroBack paper'. The state-of-the-art backbone extractor, 'CadiBack', has been employed to extract the backbone variable phases. To learn more about 'CadiBack', please refer to the 'CadiBack paper'. ## Directory Structure ## File Naming Convention In the original directory, each CNF tar file ('cnf_*.URL') contains compressed CNF files named: '[cnf_name].[compression_format]', where '[compression_format]' could be bz2, lzma, xz, gz, etc. Correspondingly, each backbone tar file ('bb_*.URL') comprises compressed backbone files named: '[cnf_name].URL'. It is important to note that a compressed CNF file will always share its '[cnf_name]' with its associated compressed backbone file. For dual formulas and their corresponding backbone files, the naming convention remains consistent, but with an added 'd_' prefix. ## Format of the Extracted Backbone File The extracted backbone file ('*.backbone') adheres to the output format of 'CadiBack'. ## References If you use 'DataBack' in your research, please kindly cite the following papers. 'NeuroBack paper': 'CadiBack paper': ## Contributors Wenxi Wang (wenxiw@URL), Yang Hu (huyang@URL)
[ "# DataBack: Dataset of SAT Formulas and Backbone Variable Phases", "## What is DataBack\n'DataBack' is a dataset that consists of 120,286 SAT formulas (in CNF format), each labeled with the phases of its backbone variables. \n'DataBack' contains two distinct subsets: the pre-training set, named 'DataBack-PT', and the fine-tuning set, named 'DataBack-FT', for pre-training and fine-tuning our 'NeuroBack' model, respectively. To learn more about 'NeuroBack' and 'DataBack', please refer to our 'NeuroBack paper'. \n\nThe state-of-the-art backbone extractor, 'CadiBack', has been employed to extract the backbone variable phases. To learn more about 'CadiBack', please refer to the 'CadiBack paper'.", "## Directory Structure", "## File Naming Convention\nIn the original directory, each CNF tar file ('cnf_*.URL') contains compressed CNF files named: '[cnf_name].[compression_format]', where '[compression_format]' could be bz2, lzma, xz, gz, etc. Correspondingly, each backbone tar file ('bb_*.URL') comprises compressed backbone files named: '[cnf_name].URL'. It is important to note that a compressed CNF file will always share its '[cnf_name]' with its associated compressed backbone file.\n\nFor dual formulas and their corresponding backbone files, the naming convention remains consistent, but with an added 'd_' prefix.", "## Format of the Extracted Backbone File \nThe extracted backbone file ('*.backbone') adheres to the output format of 'CadiBack'.", "## References\nIf you use 'DataBack' in your research, please kindly cite the following papers.\n\n'NeuroBack paper':\n\n\n'CadiBack paper':", "## Contributors\nWenxi Wang (wenxiw@URL), Yang Hu (huyang@URL)" ]
[ "TAGS\n#arxiv-2110.14053 #region-us \n", "# DataBack: Dataset of SAT Formulas and Backbone Variable Phases", "## What is DataBack\n'DataBack' is a dataset that consists of 120,286 SAT formulas (in CNF format), each labeled with the phases of its backbone variables. \n'DataBack' contains two distinct subsets: the pre-training set, named 'DataBack-PT', and the fine-tuning set, named 'DataBack-FT', for pre-training and fine-tuning our 'NeuroBack' model, respectively. To learn more about 'NeuroBack' and 'DataBack', please refer to our 'NeuroBack paper'. \n\nThe state-of-the-art backbone extractor, 'CadiBack', has been employed to extract the backbone variable phases. To learn more about 'CadiBack', please refer to the 'CadiBack paper'.", "## Directory Structure", "## File Naming Convention\nIn the original directory, each CNF tar file ('cnf_*.URL') contains compressed CNF files named: '[cnf_name].[compression_format]', where '[compression_format]' could be bz2, lzma, xz, gz, etc. Correspondingly, each backbone tar file ('bb_*.URL') comprises compressed backbone files named: '[cnf_name].URL'. It is important to note that a compressed CNF file will always share its '[cnf_name]' with its associated compressed backbone file.\n\nFor dual formulas and their corresponding backbone files, the naming convention remains consistent, but with an added 'd_' prefix.", "## Format of the Extracted Backbone File \nThe extracted backbone file ('*.backbone') adheres to the output format of 'CadiBack'.", "## References\nIf you use 'DataBack' in your research, please kindly cite the following papers.\n\n'NeuroBack paper':\n\n\n'CadiBack paper':", "## Contributors\nWenxi Wang (wenxiw@URL), Yang Hu (huyang@URL)" ]
[ 15, 17, 187, 6, 185, 37, 37, 22 ]
[ "passage: TAGS\n#arxiv-2110.14053 #region-us \n# DataBack: Dataset of SAT Formulas and Backbone Variable Phases## What is DataBack\n'DataBack' is a dataset that consists of 120,286 SAT formulas (in CNF format), each labeled with the phases of its backbone variables. \n'DataBack' contains two distinct subsets: the pre-training set, named 'DataBack-PT', and the fine-tuning set, named 'DataBack-FT', for pre-training and fine-tuning our 'NeuroBack' model, respectively. To learn more about 'NeuroBack' and 'DataBack', please refer to our 'NeuroBack paper'. \n\nThe state-of-the-art backbone extractor, 'CadiBack', has been employed to extract the backbone variable phases. To learn more about 'CadiBack', please refer to the 'CadiBack paper'.## Directory Structure## File Naming Convention\nIn the original directory, each CNF tar file ('cnf_*.URL') contains compressed CNF files named: '[cnf_name].[compression_format]', where '[compression_format]' could be bz2, lzma, xz, gz, etc. Correspondingly, each backbone tar file ('bb_*.URL') comprises compressed backbone files named: '[cnf_name].URL'. It is important to note that a compressed CNF file will always share its '[cnf_name]' with its associated compressed backbone file.\n\nFor dual formulas and their corresponding backbone files, the naming convention remains consistent, but with an added 'd_' prefix.## Format of the Extracted Backbone File \nThe extracted backbone file ('*.backbone') adheres to the output format of 'CadiBack'.## References\nIf you use 'DataBack' in your research, please kindly cite the following papers.\n\n'NeuroBack paper':\n\n\n'CadiBack paper':## Contributors\nWenxi Wang (wenxiw@URL), Yang Hu (huyang@URL)" ]
ae458ee6e1bc1a7c438204b3a6b847267913171e
All of the subjects from the US Library of Congress, cleaned up into JSONL files; the metadata is omitted. Grab the latest source data here: https://id.loc.gov/authorities/subjects.html
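Since the files are plain JSONL, one record per line, they can be read without any special tooling. A minimal sketch follows; the file name and record schema are assumptions, since the card does not document them:

```python
import json

# "subjects.jsonl" is a hypothetical file name; substitute whichever
# JSONL file you downloaded from the repo.
with open("subjects.jsonl", encoding="utf-8") as f:
    for line in f:
        record = json.loads(line)  # one subject heading per line
        print(record)
        break  # just peek at the first record
```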
benxh/us-library-of-congress-subjects
[ "region:us" ]
2023-10-08T22:36:22+00:00
{}
2023-10-08T22:38:01+00:00
[]
[]
TAGS #region-us
All of the subjects from the US Library of Congress, cleaned up into JSONL files; the metadata is omitted. Grab the latest source data here: URL
[]
[ "TAGS\n#region-us \n" ]
[ 6 ]
[ "passage: TAGS\n#region-us \n" ]
75d572409e3906fa1703198ca3239bcc4e311fd3
# Dataset Card for "celebA_spoof" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Ar4ikov/celebA_spoof
[ "region:us" ]
2023-10-09T00:02:20+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "valid", "path": "data/valid-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "Filepath", "dtype": "image"}, {"name": "Bbox", "sequence": "int64"}, {"name": "Class", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 46432284811.335, "num_examples": 419935}, {"name": "valid", "num_bytes": 4163631829.316, "num_examples": 46738}, {"name": "test", "num_bytes": 32416692607.675, "num_examples": 59191}], "download_size": 72011056582, "dataset_size": 83012609248.326}}
2023-10-09T02:25:23+00:00
[]
[]
TAGS #region-us
# Dataset Card for "celebA_spoof" More Information needed
[ "# Dataset Card for \"celebA_spoof\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"celebA_spoof\"\n\nMore Information needed" ]
[ 6, 16 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"celebA_spoof\"\n\nMore Information needed" ]
459bcf72160d6c5127162654eeb69e7398becf42
# Dataset Card for "edu-crawl-with-date" Data crawl education với dữ liệu thời gian (tháng/năm) Dữ liệu thời gian được cập nhật theo cách sau: - chiết xuất từ văn bản - crawl lại một số trang (hiếm) Hiện tại có: 190692 dòng có dữ liệu thời gian ~= 68.37 % [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
nlplabtdtu/edu-crawl-with-date
[ "region:us" ]
2023-10-09T00:32:21+00:00
{"dataset_info": {"features": [{"name": "title", "dtype": "string"}, {"name": "url", "dtype": "string"}, {"name": "body", "dtype": "string"}, {"name": "date", "dtype": "string"}, {"name": "flt_dates", "sequence": "string"}], "splits": [{"name": "train", "num_bytes": 1070649713, "num_examples": 278902}], "download_size": 387393861, "dataset_size": 1070649713}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-09T00:49:10+00:00
[]
[]
TAGS #region-us
# Dataset Card for "edu-crawl-with-date" Data crawl education với dữ liệu thời gian (tháng/năm) Dữ liệu thời gian được cập nhật theo cách sau: - chiết xuất từ văn bản - crawl lại một số trang (hiếm) Hiện tại có: 190692 dòng có dữ liệu thời gian ~= 68.37 % More Information needed
[ "# Dataset Card for \"edu-crawl-with-date\"\n\nData crawl education với dữ liệu thời gian (tháng/năm)\nDữ liệu thời gian được cập nhật theo cách sau:\n- chiết xuất từ văn bản\n- crawl lại một số trang (hiếm)\n\nHiện tại có: 190692 dòng có dữ liệu thời gian ~= 68.37 %\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"edu-crawl-with-date\"\n\nData crawl education với dữ liệu thời gian (tháng/năm)\nDữ liệu thời gian được cập nhật theo cách sau:\n- chiết xuất từ văn bản\n- crawl lại một số trang (hiếm)\n\nHiện tại có: 190692 dòng có dữ liệu thời gian ~= 68.37 %\n\nMore Information needed" ]
[ 6, 80 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"edu-crawl-with-date\"\n\nData crawl education với dữ liệu thời gian (tháng/năm)\nDữ liệu thời gian được cập nhật theo cách sau:\n- chiết xuất từ văn bản\n- crawl lại một số trang (hiếm)\n\nHiện tại có: 190692 dòng có dữ liệu thời gian ~= 68.37 %\n\nMore Information needed" ]
c9a5d5064052263be6fbcb0088a72d0d2645fcea
# Dataset Card for Evaluation run of s1ghhh/medllama-2-70b-qlora-1.1

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/s1ghhh/medllama-2-70b-qlora-1.1
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [s1ghhh/medllama-2-70b-qlora-1.1](https://huggingface.co/s1ghhh/medllama-2-70b-qlora-1.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_s1ghhh__medllama-2-70b-qlora-1.1",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-28T23:37:36.261412](https://huggingface.co/datasets/open-llm-leaderboard/details_s1ghhh__medllama-2-70b-qlora-1.1/blob/main/results_2023-10-28T23-37-36.261412.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.4476719798657718,
        "em_stderr": 0.005092348829658167,
        "f1": 0.49099203020134397,
        "f1_stderr": 0.004914477006067904,
        "acc": 0.5814221507886975,
        "acc_stderr": 0.011551816841221033
    },
    "harness|drop|3": {
        "em": 0.4476719798657718,
        "em_stderr": 0.005092348829658167,
        "f1": 0.49099203020134397,
        "f1_stderr": 0.004914477006067904
    },
    "harness|gsm8k|5": {
        "acc": 0.3206974981046247,
        "acc_stderr": 0.012856468433722304
    },
    "harness|winogrande|5": {
        "acc": 0.8421468034727704,
        "acc_stderr": 0.010247165248719763
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
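Beyond single-task details, the aggregated run metrics live in the "results" configuration with a "latest" split, as described above. A minimal sketch of pulling them follows; the layout of the returned records is an assumption, as it is not documented here:

```python
from datasets import load_dataset

# Pull the aggregated metrics instead of per-task details; the "results"
# configuration and its "latest" split are listed in this repo's configs.
results = load_dataset(
    "open-llm-leaderboard/details_s1ghhh__medllama-2-70b-qlora-1.1",
    "results",
    split="latest",
)
print(results[0])  # record layout is an assumption; inspect it first
```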
open-llm-leaderboard/details_s1ghhh__medllama-2-70b-qlora-1.1
[ "region:us" ]
2023-10-09T00:34:51+00:00
{"pretty_name": "Evaluation run of s1ghhh/medllama-2-70b-qlora-1.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [s1ghhh/medllama-2-70b-qlora-1.1](https://huggingface.co/s1ghhh/medllama-2-70b-qlora-1.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_s1ghhh__medllama-2-70b-qlora-1.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-28T23:37:36.261412](https://huggingface.co/datasets/open-llm-leaderboard/details_s1ghhh__medllama-2-70b-qlora-1.1/blob/main/results_2023-10-28T23-37-36.261412.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.4476719798657718,\n \"em_stderr\": 0.005092348829658167,\n \"f1\": 0.49099203020134397,\n \"f1_stderr\": 0.004914477006067904,\n \"acc\": 0.5814221507886975,\n \"acc_stderr\": 0.011551816841221033\n },\n \"harness|drop|3\": {\n \"em\": 0.4476719798657718,\n \"em_stderr\": 0.005092348829658167,\n \"f1\": 0.49099203020134397,\n \"f1_stderr\": 0.004914477006067904\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.3206974981046247,\n \"acc_stderr\": 0.012856468433722304\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8421468034727704,\n \"acc_stderr\": 0.010247165248719763\n }\n}\n```", "repo_url": "https://huggingface.co/s1ghhh/medllama-2-70b-qlora-1.1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|arc:challenge|25_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_28T23_37_36.261412", "path": ["**/details_harness|drop|3_2023-10-28T23-37-36.261412.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-28T23-37-36.261412.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_28T23_37_36.261412", "path": ["**/details_harness|gsm8k|5_2023-10-28T23-37-36.261412.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-28T23-37-36.261412.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hellaswag|10_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-09T01-34-27.623935.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-09T01-34-27.623935.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-09T01-34-27.623935.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-09T01-34-27.623935.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-09T01-34-27.623935.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_28T23_37_36.261412", "path": ["**/details_harness|winogrande|5_2023-10-28T23-37-36.261412.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-28T23-37-36.261412.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_09T01_34_27.623935", "path": ["results_2023-10-09T01-34-27.623935.parquet"]}, {"split": "2023_10_28T23_37_36.261412", "path": ["results_2023-10-28T23-37-36.261412.parquet"]}, {"split": "latest", "path": ["results_2023-10-28T23-37-36.261412.parquet"]}]}]}
2023-10-28T22:37:48+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of s1ghhh/medllama-2-70b-qlora-1.1

## Dataset Description

- Homepage: 
- Repository: URL
- Paper: 
- Leaderboard: URL
- Point of Contact: clementine@URL

### Dataset Summary

Dataset automatically created during the evaluation run of model s1ghhh/medllama-2-70b-qlora-1.1 on the Open LLM Leaderboard.

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following:

## Latest results

These are the latest results from run 2023-10-28T23:37:36.261412 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

### Supported Tasks and Leaderboards

### Languages

## Dataset Structure

### Data Instances

### Data Fields

### Data Splits

## Dataset Creation

### Curation Rationale

### Source Data

#### Initial Data Collection and Normalization

#### Who are the source language producers?

### Annotations

#### Annotation process

#### Who are the annotators?

### Personal and Sensitive Information

## Considerations for Using the Data

### Social Impact of Dataset

### Discussion of Biases

### Other Known Limitations

## Additional Information

### Dataset Curators

### Licensing Information

### Contributions
[ "# Dataset Card for Evaluation run of s1ghhh/medllama-2-70b-qlora-1.1", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model s1ghhh/medllama-2-70b-qlora-1.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-28T23:37:36.261412(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of s1ghhh/medllama-2-70b-qlora-1.1", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model s1ghhh/medllama-2-70b-qlora-1.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-28T23:37:36.261412(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 25, 31, 173, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of s1ghhh/medllama-2-70b-qlora-1.1## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model s1ghhh/medllama-2-70b-qlora-1.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-28T23:37:36.261412(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
98547ad8ca1205e2e5ce564343cb78f972f72ffa
The Chinese translation of the [TinyStories](https://huggingface.co/datasets/roneneldan/TinyStories) dataset. Only the `story` field has been translated (the translated field is `story_zh`):

```json
{
    "story": "\n\nLily and Ben are friends. They like to play in the park. One day, they see a big tree with a swing. Lily wants to try the swing. She runs to the tree and climbs on the swing.\n\"Push me, Ben!\" she says. Ben pushes her gently. Lily feels happy. She swings higher and higher. She laughs and shouts.\nBen watches Lily. He thinks she is cute. He wants to swing too. He waits for Lily to stop. But Lily does not stop. She swings faster and faster. She is having too much fun.\n\"Can I swing too, Lily?\" Ben asks. Lily does not hear him. She is too busy swinging. Ben feels sad. He walks away.\nLily swings so high that she loses her grip. She falls off the swing. She lands on the ground. She hurts her foot. She cries.\n\"Ow, ow, ow!\" she says. She looks for Ben. She wants him to help her. But Ben is not there. He is gone.\nLily feels sorry. She wishes she had shared the swing with Ben. She wishes he was there to hug her. She limps to the tree. She sees something hanging from a branch. It is Ben's hat. He left it for her.\nLily smiles. She thinks Ben is nice. She puts on his hat. She hopes he will come back. She wants to say sorry. She wants to be friends again.",
    "instruction": {
        "prompt:": "Write a short story (3-5 paragraphs) which only uses very simple words that a 3 year old child would understand. The story should use the verb \"hang\", the noun \"foot\" and the adjective \"cute\". The story has the following features: the story should contain at least one dialogue. Remember to only use simple words!\n\nPossible story:",
        "words": [
            "hang",
            "foot",
            "cute"
        ],
        "features": [
            "Dialogue"
        ]
    },
    "summary": "Lily and Ben play in the park and Lily gets too caught up in swinging, causing Ben to leave. Lily falls off the swing and hurts herself, but Ben leaves his hat for her as a kind gesture.",
    "source": "GPT-4",
    "story_zh": "莉莉和本是朋友。他们喜欢在公园里玩。有一天,他们在一棵大树下看到了一个秋千。莉莉想试试那个秋千。她跑到树下,爬上了秋千。\n\"推我,本!\"她说。本轻轻地推了她一下。莉莉感到很开心。她越荡越高,笑着喊叫。\n本看着莉莉。他觉得她很可爱。他也想荡秋千。他在莉莉停下来之后等着。但是莉莉没有停下来。她越荡越快。她玩得太高兴了。\n\"我也可以荡秋千吗,莉莉?\"本问。莉莉没听到他的话。她忙着荡秋千。本觉得很难过。他走开了。\n莉莉荡得太高,失去了平衡。她从秋千上摔下来,落在地上。她扭伤了脚。她哭了起来。\n\"哎呀,哎呀,哎呀!\"她说。她在找本。她希望他能帮助她。但本不在那里。他走了。\n莉莉感到很抱歉。她希望她能和本分享秋千。她希望他在那里拥抱她。她一瘸一拐地走到树下。她看到有什么东西挂在树枝上。那是本的帽子。他留给她的。\n莉莉笑了。她觉得本很好。她戴上了他的帽子。她希望他会回来。她想道歉。她想再次成为朋友。"
}
```

You can check the translation quality below:

```text
Lily and Ben are friends. They like to play in the park. One day, they see a big tree with a swing. Lily wants to try the swing. She runs to the tree and climbs on the swing.
"Push me, Ben!" she says. Ben pushes her gently. Lily feels happy. She swings higher and higher. She laughs and shouts.
Ben watches Lily. He thinks she is cute. He wants to swing too. He waits for Lily to stop. But Lily does not stop. She swings faster and faster. She is having too much fun.
"Can I swing too, Lily?" Ben asks. Lily does not hear him. She is too busy swinging. Ben feels sad. He walks away.
Lily swings so high that she loses her grip. She falls off the swing. She lands on the ground. She hurts her foot. She cries.
"Ow, ow, ow!" she says. She looks for Ben. She wants him to help her. But Ben is not there. He is gone.
Lily feels sorry. She wishes she had shared the swing with Ben. She wishes he was there to hug her. She limps to the tree. She sees something hanging from a branch. It is Ben's hat. He left it for her.
Lily smiles. She thinks Ben is nice. She puts on his hat. She hopes he will come back. She wants to say sorry.
She wants to be friends again. 莉莉和本是朋友。他们喜欢在公园里玩。有一天,他们在一棵大树下看到了一个秋千。莉莉想试试那个秋千。她跑到树下,爬上了秋千。 "推我,本!"她说。本轻轻地推了她一下。莉莉感到很开心。她越荡越高,笑着喊叫。 本看着莉莉。他觉得她很可爱。他也想荡秋千。他在莉莉停下来之后等着。但是莉莉没有停下来。她越荡越快。她玩得太高兴了。 "我也可以荡秋千吗,莉莉?"本问。莉莉没听到他的话。她忙着荡秋千。本觉得很难过。他走开了。 莉莉荡得太高,失去了平衡。她从秋千上摔下来,落在地上。她扭伤了脚。她哭了起来。 "哎呀,哎呀,哎呀!"她说。她在找本。她希望他能帮助她。但本不在那里。他走了。 莉莉感到很抱歉。她希望她能和本分享秋千。她希望他在那里拥抱她。她一瘸一拐地走到树下。她看到有什么东西挂在树枝上。那是本的帽子。他留给她的。 莉莉笑了。她觉得本很好。她戴上了他的帽子。她希望他会回来。她想道歉。她想再次成为朋友。 ``` ```text Once upon a time, there was a little girl named Lily. She had a teddy bear that she loved so much. One day, she lost it while playing in the park. She looked everywhere, but she couldn't find it. She felt sad and scared without her teddy bear. Lily's mommy saw her crying and asked what was wrong. Lily told her that she lost her teddy bear. Mommy hugged her and said, "Don't worry, we'll search for it together." They went back to the park and looked everywhere. After a while, they found the teddy bear under a tree. Lily was so happy! She hugged her teddy bear and felt comfortable again. She said, "I hope I never lose you again, teddy bear." Mommy smiled and said, "Me too, Lily. You and teddy bear are the best of friends." And they all went home, happy and content. The end. 从前,有一个小女孩叫莉莉。她非常喜欢她的泰迪熊。有一天,她在公园里玩时把它弄丢了。她找遍了所有地方,但仍然找不到它。没有她的泰迪熊,她感到很难过和害怕。 莉莉的妈妈看到她哭泣,问她发生了什么事。莉莉告诉她自己把泰迪熊弄丢了。妈妈抱住她说:“别担心,我们会一起去找的。”他们回到公园,到处寻找。过了一会儿,他们在树下找到了泰迪熊。莉莉非常高兴! 她拥抱了她的泰迪熊,感觉又舒服了。她说:“我希望我再也不要失去你,泰迪熊。”妈妈笑着说:“我也这么想,莉莉。你和泰迪熊是最好的朋友。”然后他们都高高兴兴地回家了,感到非常满足。结束。 ``` ```text Once upon a time, there was a cute puppy named Max. Max was very adorable with his big, brown eyes and wagging tail. One day, Max's owner, Emily, told him that they needed to go to the post office to mail a letter. Max didn't know what that meant, but he was excited to go for a car ride. At the post office, Emily gave the letter to the nice lady behind the desk. The lady asked Emily for a number and Emily gave her one. Max didn't know what a number was, but he saw the lady type something on the computer. After they mailed the letter, Emily and Max went back to the car. Max was happy that they went on an adventure and he couldn't wait for the next one. 从前,有一只可爱的狗狗名叫Max。Max 非常可爱,大大的棕色眼睛和摇摆的尾巴都让人喜欢。有一天,Emily告诉Max他们需要去邮局寄一封信。Max并不知道那是什么意思,但他很兴奋能去兜风。 在邮局,Emily把信交给柜台后面友好的女士。女士问Emily要了一个号码,Emily给了她一个。Max并不知道什么是号码,但看到女士在电脑上输入了一些东西。 寄完信后,Emily和Max回到了车里。Max很高兴他们去了一趟冒险,他迫不及待地期待着下一次冒险。 ``` ```text One day, a kind and honest cat named Tom found a pretty velvet ribbon. He wanted to hang it on his door. But when he tried to hang it, he saw his friend, a small bird named Sue, was sad. Sue wanted the velvet ribbon too. Tom did not want to make Sue sad. So, he thought of a way to share the ribbon. He knew that they both liked to play games. Tom said, "Let's play a game. We can take turns to have the ribbon. Today, I will hang it on my door. Tomorrow, you can hang it on your tree." Sue liked this idea. They played and shared the velvet ribbon every day. Tom and Sue were both happy. They learned that sharing is a good way to solve problems and stay friends. 一天,一只名叫汤姆的善良诚实的猫发现了一条漂亮的天鹅绒彩带。他想把它挂在门上。但当他尝试挂上时,看到他的朋友,一只名叫苏的小鸟,很伤心。 苏也想要这条天鹅绒彩带。汤姆不想让苏伤心。所以,他想到了一个分享彩带的方法。他知道他们都喜欢玩游戏。 汤姆说:“我们来玩个游戏吧。我们可以轮流拥有这条彩带。今天,我把它挂在我门上。明天,你可以把它挂在你树上。” 苏喜欢这个主意。他们每天都会玩游戏并分享这根天鹅绒彩带。汤姆和苏都很开心。他们学会了分享是一种解决问题的良好方法,也是保持友谊的好方法。 ```
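A minimal sketch of how one might load the dataset and compare the original and translated fields with the Hugging Face `datasets` library (this assumes the repo loads directly via `load_dataset` with a default config and a "train" split; adjust the names if the repo layout differs):

```python
from datasets import load_dataset

# Load the Chinese translation of TinyStories from the Hub.
# Assumption: the repo exposes a default config with a "train" split.
ds = load_dataset("adam89/TinyStoriesChinese", split="train")

example = ds[0]
print(example["story"])     # original English story
print(example["story_zh"])  # Chinese translation (per the schema above)
```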
adam89/TinyStoriesChinese
[ "license:cdla-sharing-1.0", "region:us" ]
2023-10-09T01:02:03+00:00
{"license": "cdla-sharing-1.0"}
2023-10-10T07:18:25+00:00
[]
[]
TAGS #license-cdla-sharing-1.0 #region-us
A Chinese translation of the TinyStories dataset. Only the 'story' field has been translated (the translated text is stored in the 'story_zh' field). Here are a few examples of the translation quality:
[]
[ "TAGS\n#license-cdla-sharing-1.0 #region-us \n" ]
[ 17 ]
[ "passage: TAGS\n#license-cdla-sharing-1.0 #region-us \n" ]
0245113727f78e32fc7b6ef59b54799be397c92f
# Dataset Card for Evaluation run of pankajmathur/model_007 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/pankajmathur/model_007 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [pankajmathur/model_007](https://huggingface.co/pankajmathur/model_007) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_pankajmathur__model_007", "harness_truthfulqa_mc_0", split="train") ``` A similar sketch for reading the aggregated "results" configuration is given at the end of this card. ## Latest results These are the [latest results from run 2023-10-09T02:03:09.335068](https://huggingface.co/datasets/open-llm-leaderboard/details_pankajmathur__model_007/blob/main/results_2023-10-09T02-03-09.335068.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6901502879968988, "acc_stderr": 0.031344534847114004, "acc_norm": 0.6939037892141556, "acc_norm_stderr": 0.03131458982120537, "mc1": 0.44920440636474906, "mc1_stderr": 0.01741294198611531, "mc2": 0.6312306236860621, "mc2_stderr": 0.014945471343395618 }, "harness|arc:challenge|25": { "acc": 0.6749146757679181, "acc_stderr": 0.01368814730972912, "acc_norm": 0.7107508532423208, "acc_norm_stderr": 0.013250012579393441 }, "harness|hellaswag|10": { "acc": 0.6908982274447322, "acc_stderr": 0.004611787665905346, "acc_norm": 0.8765186217884884, "acc_norm_stderr": 0.003283165867631372 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.37, "acc_stderr": 0.04852365870939098, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939098 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6444444444444445, "acc_stderr": 0.04135176749720385, "acc_norm": 0.6444444444444445, "acc_norm_stderr": 0.04135176749720385 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.8157894736842105, "acc_stderr": 0.0315469804508223, "acc_norm": 0.8157894736842105, "acc_norm_stderr": 0.0315469804508223 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.76, "acc_stderr": 0.04292346959909283, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909283 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7320754716981132, "acc_stderr": 0.027257260322494845, "acc_norm": 0.7320754716981132, "acc_norm_stderr": 0.027257260322494845 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8125, "acc_stderr": 0.032639560491693344, "acc_norm": 0.8125, "acc_norm_stderr": 0.032639560491693344 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, 
"harness|hendrycksTest-college_computer_science|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.653179190751445, "acc_stderr": 0.036291466701596636, "acc_norm": 0.653179190751445, "acc_norm_stderr": 0.036291466701596636 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3627450980392157, "acc_stderr": 0.047840607041056527, "acc_norm": 0.3627450980392157, "acc_norm_stderr": 0.047840607041056527 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.042923469599092816, "acc_norm": 0.76, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6680851063829787, "acc_stderr": 0.030783736757745657, "acc_norm": 0.6680851063829787, "acc_norm_stderr": 0.030783736757745657 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4298245614035088, "acc_stderr": 0.046570472605949625, "acc_norm": 0.4298245614035088, "acc_norm_stderr": 0.046570472605949625 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6068965517241379, "acc_stderr": 0.0407032901370707, "acc_norm": 0.6068965517241379, "acc_norm_stderr": 0.0407032901370707 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.455026455026455, "acc_stderr": 0.025646928361049398, "acc_norm": 0.455026455026455, "acc_norm_stderr": 0.025646928361049398 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.46825396825396826, "acc_stderr": 0.04463112720677173, "acc_norm": 0.46825396825396826, "acc_norm_stderr": 0.04463112720677173 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.45, "acc_stderr": 0.049999999999999996, "acc_norm": 0.45, "acc_norm_stderr": 0.049999999999999996 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8129032258064516, "acc_stderr": 0.022185710092252252, "acc_norm": 0.8129032258064516, "acc_norm_stderr": 0.022185710092252252 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5369458128078818, "acc_stderr": 0.035083705204426656, "acc_norm": 0.5369458128078818, "acc_norm_stderr": 0.035083705204426656 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8666666666666667, "acc_stderr": 0.026544435312706467, "acc_norm": 0.8666666666666667, "acc_norm_stderr": 0.026544435312706467 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8888888888888888, "acc_stderr": 0.02239078763821678, "acc_norm": 0.8888888888888888, "acc_norm_stderr": 0.02239078763821678 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9326424870466321, "acc_stderr": 0.018088393839078894, "acc_norm": 0.9326424870466321, "acc_norm_stderr": 0.018088393839078894 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7102564102564103, "acc_stderr": 0.023000628243687968, "acc_norm": 0.7102564102564103, "acc_norm_stderr": 0.023000628243687968 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.32222222222222224, "acc_stderr": 0.028493465091028597, "acc_norm": 0.32222222222222224, "acc_norm_stderr": 0.028493465091028597 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7436974789915967, "acc_stderr": 0.02835962087053395, 
"acc_norm": 0.7436974789915967, "acc_norm_stderr": 0.02835962087053395 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.4768211920529801, "acc_stderr": 0.04078093859163083, "acc_norm": 0.4768211920529801, "acc_norm_stderr": 0.04078093859163083 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8899082568807339, "acc_stderr": 0.0134199390186812, "acc_norm": 0.8899082568807339, "acc_norm_stderr": 0.0134199390186812 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5416666666666666, "acc_stderr": 0.03398110890294636, "acc_norm": 0.5416666666666666, "acc_norm_stderr": 0.03398110890294636 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9215686274509803, "acc_stderr": 0.018869514646658925, "acc_norm": 0.9215686274509803, "acc_norm_stderr": 0.018869514646658925 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8734177215189873, "acc_stderr": 0.021644195727955173, "acc_norm": 0.8734177215189873, "acc_norm_stderr": 0.021644195727955173 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7982062780269058, "acc_stderr": 0.026936111912802273, "acc_norm": 0.7982062780269058, "acc_norm_stderr": 0.026936111912802273 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.816793893129771, "acc_stderr": 0.03392770926494733, "acc_norm": 0.816793893129771, "acc_norm_stderr": 0.03392770926494733 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8677685950413223, "acc_stderr": 0.030922788320445815, "acc_norm": 0.8677685950413223, "acc_norm_stderr": 0.030922788320445815 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8240740740740741, "acc_stderr": 0.036809181416738807, "acc_norm": 0.8240740740740741, "acc_norm_stderr": 0.036809181416738807 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7791411042944786, "acc_stderr": 0.03259177392742179, "acc_norm": 0.7791411042944786, "acc_norm_stderr": 0.03259177392742179 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.49107142857142855, "acc_stderr": 0.04745033255489123, "acc_norm": 0.49107142857142855, "acc_norm_stderr": 0.04745033255489123 }, "harness|hendrycksTest-management|5": { "acc": 0.8058252427184466, "acc_stderr": 0.03916667762822582, "acc_norm": 0.8058252427184466, "acc_norm_stderr": 0.03916667762822582 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9017094017094017, "acc_stderr": 0.019503444900757567, "acc_norm": 0.9017094017094017, "acc_norm_stderr": 0.019503444900757567 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8748403575989783, "acc_stderr": 0.011832954239305724, "acc_norm": 0.8748403575989783, "acc_norm_stderr": 0.011832954239305724 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7630057803468208, "acc_stderr": 0.02289408248992599, "acc_norm": 0.7630057803468208, "acc_norm_stderr": 0.02289408248992599 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.5497206703910614, "acc_stderr": 0.016639615236845817, "acc_norm": 0.5497206703910614, "acc_norm_stderr": 0.016639615236845817 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7254901960784313, "acc_stderr": 0.02555316999182651, "acc_norm": 0.7254901960784313, "acc_norm_stderr": 0.02555316999182651 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7684887459807074, "acc_stderr": 0.023956532766639133, "acc_norm": 0.7684887459807074, "acc_norm_stderr": 0.023956532766639133 }, "harness|hendrycksTest-prehistory|5": { 
"acc": 0.8055555555555556, "acc_stderr": 0.02202136610022019, "acc_norm": 0.8055555555555556, "acc_norm_stderr": 0.02202136610022019 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5460992907801419, "acc_stderr": 0.029700453247291477, "acc_norm": 0.5460992907801419, "acc_norm_stderr": 0.029700453247291477 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.545632333767927, "acc_stderr": 0.012716941720734818, "acc_norm": 0.545632333767927, "acc_norm_stderr": 0.012716941720734818 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7316176470588235, "acc_stderr": 0.026917481224377197, "acc_norm": 0.7316176470588235, "acc_norm_stderr": 0.026917481224377197 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.75, "acc_stderr": 0.01751781884501444, "acc_norm": 0.75, "acc_norm_stderr": 0.01751781884501444 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7363636363636363, "acc_stderr": 0.04220224692971987, "acc_norm": 0.7363636363636363, "acc_norm_stderr": 0.04220224692971987 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7918367346938775, "acc_stderr": 0.025991117672813296, "acc_norm": 0.7918367346938775, "acc_norm_stderr": 0.025991117672813296 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8557213930348259, "acc_stderr": 0.02484575321230604, "acc_norm": 0.8557213930348259, "acc_norm_stderr": 0.02484575321230604 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.03487350880197769, "acc_norm": 0.86, "acc_norm_stderr": 0.03487350880197769 }, "harness|hendrycksTest-virology|5": { "acc": 0.5421686746987951, "acc_stderr": 0.0387862677100236, "acc_norm": 0.5421686746987951, "acc_norm_stderr": 0.0387862677100236 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8654970760233918, "acc_stderr": 0.026168221344662297, "acc_norm": 0.8654970760233918, "acc_norm_stderr": 0.026168221344662297 }, "harness|truthfulqa:mc|0": { "mc1": 0.44920440636474906, "mc1_stderr": 0.01741294198611531, "mc2": 0.6312306236860621, "mc2_stderr": 0.014945471343395618 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_pankajmathur__model_007
[ "region:us" ]
2023-10-09T01:03:33+00:00
{"pretty_name": "Evaluation run of pankajmathur/model_007", "dataset_summary": "Dataset automatically created during the evaluation run of model [pankajmathur/model_007](https://huggingface.co/pankajmathur/model_007) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_pankajmathur__model_007\",\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-09T02:03:09.335068](https://huggingface.co/datasets/open-llm-leaderboard/details_pankajmathur__model_007/blob/main/results_2023-10-09T02-03-09.335068.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6901502879968988,\n \"acc_stderr\": 0.031344534847114004,\n \"acc_norm\": 0.6939037892141556,\n \"acc_norm_stderr\": 0.03131458982120537,\n \"mc1\": 0.44920440636474906,\n \"mc1_stderr\": 0.01741294198611531,\n \"mc2\": 0.6312306236860621,\n \"mc2_stderr\": 0.014945471343395618\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6749146757679181,\n \"acc_stderr\": 0.01368814730972912,\n \"acc_norm\": 0.7107508532423208,\n \"acc_norm_stderr\": 0.013250012579393441\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6908982274447322,\n \"acc_stderr\": 0.004611787665905346,\n \"acc_norm\": 0.8765186217884884,\n \"acc_norm_stderr\": 0.003283165867631372\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939098,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939098\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6444444444444445,\n \"acc_stderr\": 0.04135176749720385,\n \"acc_norm\": 0.6444444444444445,\n \"acc_norm_stderr\": 0.04135176749720385\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.8157894736842105,\n \"acc_stderr\": 0.0315469804508223,\n \"acc_norm\": 0.8157894736842105,\n \"acc_norm_stderr\": 0.0315469804508223\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.04292346959909283,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.04292346959909283\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7320754716981132,\n \"acc_stderr\": 0.027257260322494845,\n \"acc_norm\": 0.7320754716981132,\n \"acc_norm_stderr\": 0.027257260322494845\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8125,\n \"acc_stderr\": 0.032639560491693344,\n \"acc_norm\": 0.8125,\n \"acc_norm_stderr\": 0.032639560491693344\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n 
},\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.653179190751445,\n \"acc_stderr\": 0.036291466701596636,\n \"acc_norm\": 0.653179190751445,\n \"acc_norm_stderr\": 0.036291466701596636\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3627450980392157,\n \"acc_stderr\": 0.047840607041056527,\n \"acc_norm\": 0.3627450980392157,\n \"acc_norm_stderr\": 0.047840607041056527\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6680851063829787,\n \"acc_stderr\": 0.030783736757745657,\n \"acc_norm\": 0.6680851063829787,\n \"acc_norm_stderr\": 0.030783736757745657\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4298245614035088,\n \"acc_stderr\": 0.046570472605949625,\n \"acc_norm\": 0.4298245614035088,\n \"acc_norm_stderr\": 0.046570472605949625\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6068965517241379,\n \"acc_stderr\": 0.0407032901370707,\n \"acc_norm\": 0.6068965517241379,\n \"acc_norm_stderr\": 0.0407032901370707\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.455026455026455,\n \"acc_stderr\": 0.025646928361049398,\n \"acc_norm\": 0.455026455026455,\n \"acc_norm_stderr\": 0.025646928361049398\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.46825396825396826,\n \"acc_stderr\": 0.04463112720677173,\n \"acc_norm\": 0.46825396825396826,\n \"acc_norm_stderr\": 0.04463112720677173\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.049999999999999996,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.049999999999999996\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8129032258064516,\n \"acc_stderr\": 0.022185710092252252,\n \"acc_norm\": 0.8129032258064516,\n \"acc_norm_stderr\": 0.022185710092252252\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5369458128078818,\n \"acc_stderr\": 0.035083705204426656,\n \"acc_norm\": 0.5369458128078818,\n \"acc_norm_stderr\": 0.035083705204426656\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8666666666666667,\n \"acc_stderr\": 0.026544435312706467,\n \"acc_norm\": 0.8666666666666667,\n \"acc_norm_stderr\": 0.026544435312706467\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8888888888888888,\n \"acc_stderr\": 0.02239078763821678,\n \"acc_norm\": 0.8888888888888888,\n \"acc_norm_stderr\": 0.02239078763821678\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9326424870466321,\n \"acc_stderr\": 0.018088393839078894,\n \"acc_norm\": 0.9326424870466321,\n \"acc_norm_stderr\": 0.018088393839078894\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.7102564102564103,\n \"acc_stderr\": 0.023000628243687968,\n 
\"acc_norm\": 0.7102564102564103,\n \"acc_norm_stderr\": 0.023000628243687968\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.32222222222222224,\n \"acc_stderr\": 0.028493465091028597,\n \"acc_norm\": 0.32222222222222224,\n \"acc_norm_stderr\": 0.028493465091028597\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7436974789915967,\n \"acc_stderr\": 0.02835962087053395,\n \"acc_norm\": 0.7436974789915967,\n \"acc_norm_stderr\": 0.02835962087053395\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.4768211920529801,\n \"acc_stderr\": 0.04078093859163083,\n \"acc_norm\": 0.4768211920529801,\n \"acc_norm_stderr\": 0.04078093859163083\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8899082568807339,\n \"acc_stderr\": 0.0134199390186812,\n \"acc_norm\": 0.8899082568807339,\n \"acc_norm_stderr\": 0.0134199390186812\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5416666666666666,\n \"acc_stderr\": 0.03398110890294636,\n \"acc_norm\": 0.5416666666666666,\n \"acc_norm_stderr\": 0.03398110890294636\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.9215686274509803,\n \"acc_stderr\": 0.018869514646658925,\n \"acc_norm\": 0.9215686274509803,\n \"acc_norm_stderr\": 0.018869514646658925\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8734177215189873,\n \"acc_stderr\": 0.021644195727955173,\n \"acc_norm\": 0.8734177215189873,\n \"acc_norm_stderr\": 0.021644195727955173\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7982062780269058,\n \"acc_stderr\": 0.026936111912802273,\n \"acc_norm\": 0.7982062780269058,\n \"acc_norm_stderr\": 0.026936111912802273\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.816793893129771,\n \"acc_stderr\": 0.03392770926494733,\n \"acc_norm\": 0.816793893129771,\n \"acc_norm_stderr\": 0.03392770926494733\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8677685950413223,\n \"acc_stderr\": 0.030922788320445815,\n \"acc_norm\": 0.8677685950413223,\n \"acc_norm_stderr\": 0.030922788320445815\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8240740740740741,\n \"acc_stderr\": 0.036809181416738807,\n \"acc_norm\": 0.8240740740740741,\n \"acc_norm_stderr\": 0.036809181416738807\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7791411042944786,\n \"acc_stderr\": 0.03259177392742179,\n \"acc_norm\": 0.7791411042944786,\n \"acc_norm_stderr\": 0.03259177392742179\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.49107142857142855,\n \"acc_stderr\": 0.04745033255489123,\n \"acc_norm\": 0.49107142857142855,\n \"acc_norm_stderr\": 0.04745033255489123\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8058252427184466,\n \"acc_stderr\": 0.03916667762822582,\n \"acc_norm\": 0.8058252427184466,\n \"acc_norm_stderr\": 0.03916667762822582\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9017094017094017,\n \"acc_stderr\": 0.019503444900757567,\n \"acc_norm\": 0.9017094017094017,\n \"acc_norm_stderr\": 0.019503444900757567\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8748403575989783,\n \"acc_stderr\": 0.011832954239305724,\n \"acc_norm\": 0.8748403575989783,\n \"acc_norm_stderr\": 
0.011832954239305724\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7630057803468208,\n \"acc_stderr\": 0.02289408248992599,\n \"acc_norm\": 0.7630057803468208,\n \"acc_norm_stderr\": 0.02289408248992599\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.5497206703910614,\n \"acc_stderr\": 0.016639615236845817,\n \"acc_norm\": 0.5497206703910614,\n \"acc_norm_stderr\": 0.016639615236845817\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7254901960784313,\n \"acc_stderr\": 0.02555316999182651,\n \"acc_norm\": 0.7254901960784313,\n \"acc_norm_stderr\": 0.02555316999182651\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7684887459807074,\n \"acc_stderr\": 0.023956532766639133,\n \"acc_norm\": 0.7684887459807074,\n \"acc_norm_stderr\": 0.023956532766639133\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8055555555555556,\n \"acc_stderr\": 0.02202136610022019,\n \"acc_norm\": 0.8055555555555556,\n \"acc_norm_stderr\": 0.02202136610022019\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5460992907801419,\n \"acc_stderr\": 0.029700453247291477,\n \"acc_norm\": 0.5460992907801419,\n \"acc_norm_stderr\": 0.029700453247291477\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.545632333767927,\n \"acc_stderr\": 0.012716941720734818,\n \"acc_norm\": 0.545632333767927,\n \"acc_norm_stderr\": 0.012716941720734818\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7316176470588235,\n \"acc_stderr\": 0.026917481224377197,\n \"acc_norm\": 0.7316176470588235,\n \"acc_norm_stderr\": 0.026917481224377197\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.01751781884501444,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.01751781884501444\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7363636363636363,\n \"acc_stderr\": 0.04220224692971987,\n \"acc_norm\": 0.7363636363636363,\n \"acc_norm_stderr\": 0.04220224692971987\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7918367346938775,\n \"acc_stderr\": 0.025991117672813296,\n \"acc_norm\": 0.7918367346938775,\n \"acc_norm_stderr\": 0.025991117672813296\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8557213930348259,\n \"acc_stderr\": 0.02484575321230604,\n \"acc_norm\": 0.8557213930348259,\n \"acc_norm_stderr\": 0.02484575321230604\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.03487350880197769,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.03487350880197769\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5421686746987951,\n \"acc_stderr\": 0.0387862677100236,\n \"acc_norm\": 0.5421686746987951,\n \"acc_norm_stderr\": 0.0387862677100236\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8654970760233918,\n \"acc_stderr\": 0.026168221344662297,\n \"acc_norm\": 0.8654970760233918,\n \"acc_norm_stderr\": 0.026168221344662297\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.44920440636474906,\n \"mc1_stderr\": 0.01741294198611531,\n \"mc2\": 0.6312306236860621,\n \"mc2_stderr\": 0.014945471343395618\n }\n}\n```", "repo_url": "https://huggingface.co/pankajmathur/model_007", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": 
["**/details_harness|arc:challenge|25_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hellaswag|10_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-09T02-03-09.335068.parquet", 
"**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-09T02-03-09.335068.parquet", 
"**/details_harness|hendrycksTest-econometrics|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-09T02-03-09.335068.parquet", 
"**/details_harness|hendrycksTest-public_relations|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-09T02-03-09.335068.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-09T02-03-09.335068.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_09T02_03_09.335068", "path": ["results_2023-10-09T02-03-09.335068.parquet"]}, {"split": "latest", "path": ["results_2023-10-09T02-03-09.335068.parquet"]}]}]}
2023-10-09T01:04:34+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of pankajmathur/model_007 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model pankajmathur/model_007 on the Open LLM Leaderboard. The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-09T02:03:09.335068 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
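The load snippet referred to by "do the following" above was stripped from this flattened rendering along with the URLs. A minimal sketch of the kind of call it describes, assuming the repo follows the leaderboard's `details_<org>__<model>` naming pattern (the exact repo id is not shown in this rendering; the config name is taken from this record's metadata):

```python
from datasets import load_dataset

# Hypothetical repo id inferred from the details_<org>__<model> pattern; adjust if it differs.
data = load_dataset(
    "open-llm-leaderboard/details_pankajmathur__model_007",
    "harness_truthfulqa_mc_0",  # one of the 61 per-task configurations listed in the metadata
    split="train",              # "train" always points to the latest run
)
```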
[ "# Dataset Card for Evaluation run of pankajmathur/model_007", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model pankajmathur/model_007 on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-09T02:03:09.335068(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of pankajmathur/model_007", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model pankajmathur/model_007 on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-09T02:03:09.335068(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 17, 31, 165, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of pankajmathur/model_007## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model pankajmathur/model_007 on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-09T02:03:09.335068(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
5deb110473d53177c252db57342e566e1a7efc93
# Dataset Card for Evaluation run of pankajmathur/orca_mini_v3_70b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/pankajmathur/orca_mini_v3_70b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [pankajmathur/orca_mini_v3_70b](https://huggingface.co/pankajmathur/orca_mini_v3_70b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_pankajmathur__orca_mini_v3_70b", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-10-09T02:12:05.216705](https://huggingface.co/datasets/open-llm-leaderboard/details_pankajmathur__orca_mini_v3_70b/blob/main/results_2023-10-09T02-12-05.216705.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.7010508529623596, "acc_stderr": 0.0309286120388273, "acc_norm": 0.7049679984523141, "acc_norm_stderr": 0.030896356315399304, "mc1": 0.42962056303549573, "mc1_stderr": 0.017329234580409098, "mc2": 0.6126968953087459, "mc2_stderr": 0.015087648780065216 }, "harness|arc:challenge|25": { "acc": 0.6646757679180887, "acc_stderr": 0.013796182947785562, "acc_norm": 0.712457337883959, "acc_norm_stderr": 0.013226719056266129 }, "harness|hellaswag|10": { "acc": 0.6951802429794861, "acc_stderr": 0.00459390260197934, "acc_norm": 0.8785102569209321, "acc_norm_stderr": 0.0032602788112468337 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6296296296296297, "acc_stderr": 0.04171654161354543, "acc_norm": 0.6296296296296297, "acc_norm_stderr": 0.04171654161354543 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.8157894736842105, "acc_stderr": 0.0315469804508223, "acc_norm": 0.8157894736842105, "acc_norm_stderr": 0.0315469804508223 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.77, "acc_stderr": 0.042295258468165044, "acc_norm": 0.77, "acc_norm_stderr": 0.042295258468165044 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7396226415094339, "acc_stderr": 0.027008766090708052, "acc_norm": 0.7396226415094339, "acc_norm_stderr": 0.027008766090708052 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8125, "acc_stderr": 0.032639560491693344, "acc_norm": 0.8125, "acc_norm_stderr": 0.032639560491693344 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 
0.050251890762960605 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6589595375722543, "acc_stderr": 0.036146654241808254, "acc_norm": 0.6589595375722543, "acc_norm_stderr": 0.036146654241808254 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.38235294117647056, "acc_stderr": 0.04835503696107223, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.04835503696107223 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.042923469599092816, "acc_norm": 0.76, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6936170212765957, "acc_stderr": 0.03013590647851756, "acc_norm": 0.6936170212765957, "acc_norm_stderr": 0.03013590647851756 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4473684210526316, "acc_stderr": 0.04677473004491199, "acc_norm": 0.4473684210526316, "acc_norm_stderr": 0.04677473004491199 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6344827586206897, "acc_stderr": 0.040131241954243856, "acc_norm": 0.6344827586206897, "acc_norm_stderr": 0.040131241954243856 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.48148148148148145, "acc_stderr": 0.02573364199183898, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.02573364199183898 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4444444444444444, "acc_stderr": 0.04444444444444449, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.04444444444444449 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8161290322580645, "acc_stderr": 0.02203721734026783, "acc_norm": 0.8161290322580645, "acc_norm_stderr": 0.02203721734026783 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5665024630541872, "acc_stderr": 0.034867317274198714, "acc_norm": 0.5665024630541872, "acc_norm_stderr": 0.034867317274198714 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.78, "acc_stderr": 0.04163331998932262, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932262 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8424242424242424, "acc_stderr": 0.02845038880528436, "acc_norm": 0.8424242424242424, "acc_norm_stderr": 0.02845038880528436 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8737373737373737, "acc_stderr": 0.023664359402880232, "acc_norm": 0.8737373737373737, "acc_norm_stderr": 0.023664359402880232 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9326424870466321, "acc_stderr": 0.0180883938390789, "acc_norm": 0.9326424870466321, "acc_norm_stderr": 0.0180883938390789 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7025641025641025, "acc_stderr": 0.023177408131465942, "acc_norm": 0.7025641025641025, "acc_norm_stderr": 0.023177408131465942 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3037037037037037, "acc_stderr": 0.028037929969114986, "acc_norm": 0.3037037037037037, "acc_norm_stderr": 0.028037929969114986 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7478991596638656, "acc_stderr": 
0.028205545033277723, "acc_norm": 0.7478991596638656, "acc_norm_stderr": 0.028205545033277723 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.5033112582781457, "acc_stderr": 0.04082393379449654, "acc_norm": 0.5033112582781457, "acc_norm_stderr": 0.04082393379449654 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.9009174311926605, "acc_stderr": 0.01280978008187893, "acc_norm": 0.9009174311926605, "acc_norm_stderr": 0.01280978008187893 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5879629629629629, "acc_stderr": 0.03356787758160831, "acc_norm": 0.5879629629629629, "acc_norm_stderr": 0.03356787758160831 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9264705882352942, "acc_stderr": 0.018318855850089678, "acc_norm": 0.9264705882352942, "acc_norm_stderr": 0.018318855850089678 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.9029535864978903, "acc_stderr": 0.019269323025640255, "acc_norm": 0.9029535864978903, "acc_norm_stderr": 0.019269323025640255 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7757847533632287, "acc_stderr": 0.027991534258519513, "acc_norm": 0.7757847533632287, "acc_norm_stderr": 0.027991534258519513 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8625954198473282, "acc_stderr": 0.030194823996804475, "acc_norm": 0.8625954198473282, "acc_norm_stderr": 0.030194823996804475 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8760330578512396, "acc_stderr": 0.030083098716035202, "acc_norm": 0.8760330578512396, "acc_norm_stderr": 0.030083098716035202 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8240740740740741, "acc_stderr": 0.036809181416738807, "acc_norm": 0.8240740740740741, "acc_norm_stderr": 0.036809181416738807 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8159509202453987, "acc_stderr": 0.030446777687971726, "acc_norm": 0.8159509202453987, "acc_norm_stderr": 0.030446777687971726 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5178571428571429, "acc_stderr": 0.047427623612430116, "acc_norm": 0.5178571428571429, "acc_norm_stderr": 0.047427623612430116 }, "harness|hendrycksTest-management|5": { "acc": 0.8155339805825242, "acc_stderr": 0.03840423627288276, "acc_norm": 0.8155339805825242, "acc_norm_stderr": 0.03840423627288276 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9145299145299145, "acc_stderr": 0.01831589168562585, "acc_norm": 0.9145299145299145, "acc_norm_stderr": 0.01831589168562585 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8748403575989783, "acc_stderr": 0.011832954239305733, "acc_norm": 0.8748403575989783, "acc_norm_stderr": 0.011832954239305733 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.791907514450867, "acc_stderr": 0.0218552552634218, "acc_norm": 0.791907514450867, "acc_norm_stderr": 0.0218552552634218 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.5575418994413408, "acc_stderr": 0.01661139368726857, "acc_norm": 0.5575418994413408, "acc_norm_stderr": 0.01661139368726857 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7418300653594772, "acc_stderr": 0.025058503316958157, "acc_norm": 0.7418300653594772, "acc_norm_stderr": 0.025058503316958157 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.77491961414791, "acc_stderr": 0.023720088516179027, "acc_norm": 0.77491961414791, "acc_norm_stderr": 0.023720088516179027 }, 
"harness|hendrycksTest-prehistory|5": { "acc": 0.8209876543209876, "acc_stderr": 0.021330868762127062, "acc_norm": 0.8209876543209876, "acc_norm_stderr": 0.021330868762127062 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5851063829787234, "acc_stderr": 0.0293922365846125, "acc_norm": 0.5851063829787234, "acc_norm_stderr": 0.0293922365846125 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.559973924380704, "acc_stderr": 0.012678037478574513, "acc_norm": 0.559973924380704, "acc_norm_stderr": 0.012678037478574513 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7169117647058824, "acc_stderr": 0.02736586113151381, "acc_norm": 0.7169117647058824, "acc_norm_stderr": 0.02736586113151381 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7549019607843137, "acc_stderr": 0.017401816711427653, "acc_norm": 0.7549019607843137, "acc_norm_stderr": 0.017401816711427653 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7181818181818181, "acc_stderr": 0.043091187099464585, "acc_norm": 0.7181818181818181, "acc_norm_stderr": 0.043091187099464585 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.8, "acc_stderr": 0.02560737598657916, "acc_norm": 0.8, "acc_norm_stderr": 0.02560737598657916 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8756218905472637, "acc_stderr": 0.023335401790166327, "acc_norm": 0.8756218905472637, "acc_norm_stderr": 0.023335401790166327 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.91, "acc_stderr": 0.028762349126466125, "acc_norm": 0.91, "acc_norm_stderr": 0.028762349126466125 }, "harness|hendrycksTest-virology|5": { "acc": 0.5240963855421686, "acc_stderr": 0.03887971849597264, "acc_norm": 0.5240963855421686, "acc_norm_stderr": 0.03887971849597264 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8654970760233918, "acc_stderr": 0.026168221344662297, "acc_norm": 0.8654970760233918, "acc_norm_stderr": 0.026168221344662297 }, "harness|truthfulqa:mc|0": { "mc1": 0.42962056303549573, "mc1_stderr": 0.017329234580409098, "mc2": 0.6126968953087459, "mc2_stderr": 0.015087648780065216 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_pankajmathur__orca_mini_v3_70b
[ "region:us" ]
2023-10-09T01:12:28+00:00
{"pretty_name": "Evaluation run of pankajmathur/orca_mini_v3_70b", "dataset_summary": "Dataset automatically created during the evaluation run of model [pankajmathur/orca_mini_v3_70b](https://huggingface.co/pankajmathur/orca_mini_v3_70b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_pankajmathur__orca_mini_v3_70b\",\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-09T02:12:05.216705](https://huggingface.co/datasets/open-llm-leaderboard/details_pankajmathur__orca_mini_v3_70b/blob/main/results_2023-10-09T02-12-05.216705.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7010508529623596,\n \"acc_stderr\": 0.0309286120388273,\n \"acc_norm\": 0.7049679984523141,\n \"acc_norm_stderr\": 0.030896356315399304,\n \"mc1\": 0.42962056303549573,\n \"mc1_stderr\": 0.017329234580409098,\n \"mc2\": 0.6126968953087459,\n \"mc2_stderr\": 0.015087648780065216\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6646757679180887,\n \"acc_stderr\": 0.013796182947785562,\n \"acc_norm\": 0.712457337883959,\n \"acc_norm_stderr\": 0.013226719056266129\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6951802429794861,\n \"acc_stderr\": 0.00459390260197934,\n \"acc_norm\": 0.8785102569209321,\n \"acc_norm_stderr\": 0.0032602788112468337\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6296296296296297,\n \"acc_stderr\": 0.04171654161354543,\n \"acc_norm\": 0.6296296296296297,\n \"acc_norm_stderr\": 0.04171654161354543\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.8157894736842105,\n \"acc_stderr\": 0.0315469804508223,\n \"acc_norm\": 0.8157894736842105,\n \"acc_norm_stderr\": 0.0315469804508223\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.042295258468165044,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.042295258468165044\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7396226415094339,\n \"acc_stderr\": 0.027008766090708052,\n \"acc_norm\": 0.7396226415094339,\n \"acc_norm_stderr\": 0.027008766090708052\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8125,\n \"acc_stderr\": 0.032639560491693344,\n \"acc_norm\": 0.8125,\n \"acc_norm_stderr\": 0.032639560491693344\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n 
\"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001974,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001974\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6589595375722543,\n \"acc_stderr\": 0.036146654241808254,\n \"acc_norm\": 0.6589595375722543,\n \"acc_norm_stderr\": 0.036146654241808254\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107223,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107223\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6936170212765957,\n \"acc_stderr\": 0.03013590647851756,\n \"acc_norm\": 0.6936170212765957,\n \"acc_norm_stderr\": 0.03013590647851756\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4473684210526316,\n \"acc_stderr\": 0.04677473004491199,\n \"acc_norm\": 0.4473684210526316,\n \"acc_norm_stderr\": 0.04677473004491199\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6344827586206897,\n \"acc_stderr\": 0.040131241954243856,\n \"acc_norm\": 0.6344827586206897,\n \"acc_norm_stderr\": 0.040131241954243856\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.48148148148148145,\n \"acc_stderr\": 0.02573364199183898,\n \"acc_norm\": 0.48148148148148145,\n \"acc_norm_stderr\": 0.02573364199183898\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4444444444444444,\n \"acc_stderr\": 0.04444444444444449,\n \"acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.04444444444444449\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8161290322580645,\n \"acc_stderr\": 0.02203721734026783,\n \"acc_norm\": 0.8161290322580645,\n \"acc_norm_stderr\": 0.02203721734026783\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5665024630541872,\n \"acc_stderr\": 0.034867317274198714,\n \"acc_norm\": 0.5665024630541872,\n \"acc_norm_stderr\": 0.034867317274198714\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932262,\n \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.04163331998932262\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8424242424242424,\n \"acc_stderr\": 0.02845038880528436,\n \"acc_norm\": 0.8424242424242424,\n \"acc_norm_stderr\": 0.02845038880528436\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8737373737373737,\n \"acc_stderr\": 0.023664359402880232,\n \"acc_norm\": 0.8737373737373737,\n \"acc_norm_stderr\": 0.023664359402880232\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9326424870466321,\n \"acc_stderr\": 0.0180883938390789,\n \"acc_norm\": 0.9326424870466321,\n \"acc_norm_stderr\": 0.0180883938390789\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.7025641025641025,\n 
\"acc_stderr\": 0.023177408131465942,\n \"acc_norm\": 0.7025641025641025,\n \"acc_norm_stderr\": 0.023177408131465942\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3037037037037037,\n \"acc_stderr\": 0.028037929969114986,\n \"acc_norm\": 0.3037037037037037,\n \"acc_norm_stderr\": 0.028037929969114986\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7478991596638656,\n \"acc_stderr\": 0.028205545033277723,\n \"acc_norm\": 0.7478991596638656,\n \"acc_norm_stderr\": 0.028205545033277723\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.5033112582781457,\n \"acc_stderr\": 0.04082393379449654,\n \"acc_norm\": 0.5033112582781457,\n \"acc_norm_stderr\": 0.04082393379449654\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.9009174311926605,\n \"acc_stderr\": 0.01280978008187893,\n \"acc_norm\": 0.9009174311926605,\n \"acc_norm_stderr\": 0.01280978008187893\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5879629629629629,\n \"acc_stderr\": 0.03356787758160831,\n \"acc_norm\": 0.5879629629629629,\n \"acc_norm_stderr\": 0.03356787758160831\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.9264705882352942,\n \"acc_stderr\": 0.018318855850089678,\n \"acc_norm\": 0.9264705882352942,\n \"acc_norm_stderr\": 0.018318855850089678\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.9029535864978903,\n \"acc_stderr\": 0.019269323025640255,\n \"acc_norm\": 0.9029535864978903,\n \"acc_norm_stderr\": 0.019269323025640255\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7757847533632287,\n \"acc_stderr\": 0.027991534258519513,\n \"acc_norm\": 0.7757847533632287,\n \"acc_norm_stderr\": 0.027991534258519513\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8625954198473282,\n \"acc_stderr\": 0.030194823996804475,\n \"acc_norm\": 0.8625954198473282,\n \"acc_norm_stderr\": 0.030194823996804475\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8760330578512396,\n \"acc_stderr\": 0.030083098716035202,\n \"acc_norm\": 0.8760330578512396,\n \"acc_norm_stderr\": 0.030083098716035202\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8240740740740741,\n \"acc_stderr\": 0.036809181416738807,\n \"acc_norm\": 0.8240740740740741,\n \"acc_norm_stderr\": 0.036809181416738807\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.8159509202453987,\n \"acc_stderr\": 0.030446777687971726,\n \"acc_norm\": 0.8159509202453987,\n \"acc_norm_stderr\": 0.030446777687971726\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5178571428571429,\n \"acc_stderr\": 0.047427623612430116,\n \"acc_norm\": 0.5178571428571429,\n \"acc_norm_stderr\": 0.047427623612430116\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8155339805825242,\n \"acc_stderr\": 0.03840423627288276,\n \"acc_norm\": 0.8155339805825242,\n \"acc_norm_stderr\": 0.03840423627288276\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9145299145299145,\n \"acc_stderr\": 0.01831589168562585,\n \"acc_norm\": 0.9145299145299145,\n \"acc_norm_stderr\": 0.01831589168562585\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8748403575989783,\n \"acc_stderr\": 0.011832954239305733,\n \"acc_norm\": 
0.8748403575989783,\n \"acc_norm_stderr\": 0.011832954239305733\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.791907514450867,\n \"acc_stderr\": 0.0218552552634218,\n \"acc_norm\": 0.791907514450867,\n \"acc_norm_stderr\": 0.0218552552634218\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.5575418994413408,\n \"acc_stderr\": 0.01661139368726857,\n \"acc_norm\": 0.5575418994413408,\n \"acc_norm_stderr\": 0.01661139368726857\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7418300653594772,\n \"acc_stderr\": 0.025058503316958157,\n \"acc_norm\": 0.7418300653594772,\n \"acc_norm_stderr\": 0.025058503316958157\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.77491961414791,\n \"acc_stderr\": 0.023720088516179027,\n \"acc_norm\": 0.77491961414791,\n \"acc_norm_stderr\": 0.023720088516179027\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8209876543209876,\n \"acc_stderr\": 0.021330868762127062,\n \"acc_norm\": 0.8209876543209876,\n \"acc_norm_stderr\": 0.021330868762127062\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5851063829787234,\n \"acc_stderr\": 0.0293922365846125,\n \"acc_norm\": 0.5851063829787234,\n \"acc_norm_stderr\": 0.0293922365846125\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.559973924380704,\n \"acc_stderr\": 0.012678037478574513,\n \"acc_norm\": 0.559973924380704,\n \"acc_norm_stderr\": 0.012678037478574513\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7169117647058824,\n \"acc_stderr\": 0.02736586113151381,\n \"acc_norm\": 0.7169117647058824,\n \"acc_norm_stderr\": 0.02736586113151381\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.7549019607843137,\n \"acc_stderr\": 0.017401816711427653,\n \"acc_norm\": 0.7549019607843137,\n \"acc_norm_stderr\": 0.017401816711427653\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7181818181818181,\n \"acc_stderr\": 0.043091187099464585,\n \"acc_norm\": 0.7181818181818181,\n \"acc_norm_stderr\": 0.043091187099464585\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.02560737598657916,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.02560737598657916\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8756218905472637,\n \"acc_stderr\": 0.023335401790166327,\n \"acc_norm\": 0.8756218905472637,\n \"acc_norm_stderr\": 0.023335401790166327\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.91,\n \"acc_stderr\": 0.028762349126466125,\n \"acc_norm\": 0.91,\n \"acc_norm_stderr\": 0.028762349126466125\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5240963855421686,\n \"acc_stderr\": 0.03887971849597264,\n \"acc_norm\": 0.5240963855421686,\n \"acc_norm_stderr\": 0.03887971849597264\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8654970760233918,\n \"acc_stderr\": 0.026168221344662297,\n \"acc_norm\": 0.8654970760233918,\n \"acc_norm_stderr\": 0.026168221344662297\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.42962056303549573,\n \"mc1_stderr\": 0.017329234580409098,\n \"mc2\": 0.6126968953087459,\n \"mc2_stderr\": 0.015087648780065216\n }\n}\n```", "repo_url": "https://huggingface.co/pankajmathur/orca_mini_v3_70b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_09T02_12_05.216705", 
"path": ["**/details_harness|arc:challenge|25_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hellaswag|10_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-09T02-12-05.216705.parquet", 
"**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-09T02-12-05.216705.parquet", 
"**/details_harness|hendrycksTest-econometrics|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-09T02-12-05.216705.parquet", 
"**/details_harness|hendrycksTest-public_relations|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-09T02-12-05.216705.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-09T02-12-05.216705.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_09T02_12_05.216705", "path": ["results_2023-10-09T02-12-05.216705.parquet"]}, {"split": "latest", "path": ["results_2023-10-09T02-12-05.216705.parquet"]}]}]}
2023-10-09T01:13:29+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of pankajmathur/orca_mini_v3_70b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model pankajmathur/orca_mini_v3_70b on the Open LLM Leaderboard. The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-09T02:12:05.216705 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
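The loading step referenced above ("To load the details from a run, you can for instance do the following:") had its code block stripped during processing. A minimal sketch, assuming the standard Open LLM Leaderboard details-repo naming for this model (the repo id below is an assumption, not taken from this record):

```python
from datasets import load_dataset

# Sketch only: the repo id follows the usual "details_<org>__<model>" convention
# used by the Open LLM Leaderboard details repositories; verify before relying on it.
data = load_dataset(
    "open-llm-leaderboard/details_pankajmathur__orca_mini_v3_70b",
    "harness_truthfulqa_mc_0",  # any config_name listed in the metadata above
    split="latest",             # or the timestamped split "2023_10_09T02_12_05.216705"
)
```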
[ "# Dataset Card for Evaluation run of pankajmathur/orca_mini_v3_70b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model pankajmathur/orca_mini_v3_70b on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-09T02:12:05.216705(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of pankajmathur/orca_mini_v3_70b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model pankajmathur/orca_mini_v3_70b on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-09T02:12:05.216705(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 24, 31, 172, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of pankajmathur/orca_mini_v3_70b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model pankajmathur/orca_mini_v3_70b on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-09T02:12:05.216705(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
9833490feb6342a88c7d982cae629e3023672b76
# Dataset Card for "quac-merged" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
hanifabdlh/quac-merged
[ "region:us" ]
2023-10-09T01:15:37+00:00
{"dataset_info": {"features": [{"name": "context", "dtype": "string"}, {"name": "instruction", "dtype": "string"}, {"name": "response", "dtype": "string"}, {"name": "instruction_source", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 271212149, "num_examples": 482055}], "download_size": 142626540, "dataset_size": 271212149}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-09T01:15:54+00:00
[]
[]
TAGS #region-us
# Dataset Card for "quac-merged" More Information needed
[ "# Dataset Card for \"quac-merged\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"quac-merged\"\n\nMore Information needed" ]
[ 6, 15 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"quac-merged\"\n\nMore Information needed" ]
ea0c222b5502da19bb85fafbc19718ecd036ab06
# This dataset is a Japanese version of databrick-15k, which became famous in 2023. ## Note, however, that the data is split into 4 parts. ## The contents of the data have been modified substantially. (Roughly half of the records no longer retain their original form.) - English glosses were added in parentheses after katakana loanwords. - Records that were anomalous as QnA pairs were corrected. - Entries with low informational value, such as trivia about "Game of Thrones", were removed. - Various other information was added as experimental trials. For more details, please refer to [this blog](https://jpnqeur23lmqsw.blogspot.com/2023/09/qeur23llmdss10-databricks15k.html).
QEU/databricks-dolly-16k-line_ja-4_of_4
[ "license:apache-2.0", "region:us" ]
2023-10-09T02:09:56+00:00
{"license": "apache-2.0"}
2023-12-04T09:21:45+00:00
[]
[]
TAGS #license-apache-2.0 #region-us
# This dataset is a Japanese version of databrick-15k, which became famous in 2023. ## Note, however, that the data is split into 4 parts. ## The contents of the data have been modified substantially. (Roughly half of the records no longer retain their original form.) - English glosses were added in parentheses after katakana loanwords. - Records that were anomalous as QnA pairs were corrected. - Entries with low informational value, such as trivia about "Game of Thrones", were removed. - Various other information was added as experimental trials. For more details, please refer to this blog.
[ "# このデータセットは、2023年に有名になったdatabrick-15kの日本語版です。", "## ただし、データは4分割されています。", "## データの内容は非常に変わっています。(半分ぐらいは、原型をとどめていません)\n- カタカナ語にカッコ付けで英語を追記しました。\n- このデータセットには、QnAとして異常なレコードが見られることから修正しました。\n- 「ゲームオブスローン」に関するトリビアなど、情報価値が低いものは削除しました。\n- その他、いろいろなトライアルとして情報を追加しました。\n\n詳しい情報はこちらのブログを参考にしてください。" ]
[ "TAGS\n#license-apache-2.0 #region-us \n", "# このデータセットは、2023年に有名になったdatabrick-15kの日本語版です。", "## ただし、データは4分割されています。", "## データの内容は非常に変わっています。(半分ぐらいは、原型をとどめていません)\n- カタカナ語にカッコ付けで英語を追記しました。\n- このデータセットには、QnAとして異常なレコードが見られることから修正しました。\n- 「ゲームオブスローン」に関するトリビアなど、情報価値が低いものは削除しました。\n- その他、いろいろなトライアルとして情報を追加しました。\n\n詳しい情報はこちらのブログを参考にしてください。" ]
[ 14, 21, 10, 102 ]
[ "passage: TAGS\n#license-apache-2.0 #region-us \n# このデータセットは、2023年に有名になったdatabrick-15kの日本語版です。## ただし、データは4分割されています。## データの内容は非常に変わっています。(半分ぐらいは、原型をとどめていません)\n- カタカナ語にカッコ付けで英語を追記しました。\n- このデータセットには、QnAとして異常なレコードが見られることから修正しました。\n- 「ゲームオブスローン」に関するトリビアなど、情報価値が低いものは削除しました。\n- その他、いろいろなトライアルとして情報を追加しました。\n\n詳しい情報はこちらのブログを参考にしてください。" ]
b7884f6ed7e844b781571e7d48fe8edbd212e0ac
# Dataset Card for Dataset Name ## Dataset Description - **Homepage:** - **Repository:** - **Paper:** - **Leaderboard:** - **Point of Contact:** ### Dataset Summary This dataset card aims to be a base template for new datasets. It has been generated using [this raw template](https://github.com/huggingface/huggingface_hub/blob/main/src/huggingface_hub/templates/datasetcard_template.md?plain=1). ### Supported Tasks and Leaderboards [More Information Needed] ### Languages This dataset is purely in English. Some of the responses were generated by ChatGPT. ### Discussion of Biases This dataset intentionally carries gender and job-related biases which reflect ones that exist in society, for the research purposes of examining the effects the biases have on the model. Creators do not support these biases.
d4un/training-bias
[ "region:us" ]
2023-10-09T02:15:54+00:00
{}
2023-10-09T04:12:26+00:00
[]
[]
TAGS #region-us
# Dataset Card for Dataset Name ## Dataset Description - Homepage: - Repository: - Paper: - Leaderboard: - Point of Contact: ### Dataset Summary This dataset card aims to be a base template for new datasets. It has been generated using this raw template. ### Supported Tasks and Leaderboards ### Languages This dataset is purely in English. Some of the responses were generated by ChatGPT. ### Discussion of Biases This dataset intentionally carries gender and job-related biases which reflect ones that exist in society, for the research purposes of examining the effects the biases have on the model. Creators do not support these biases.
[ "# Dataset Card for Dataset Name", "## Dataset Description\n\n- Homepage: \n- Repository: \n- Paper: \n- Leaderboard: \n- Point of Contact:", "### Dataset Summary\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.", "### Supported Tasks and Leaderboards", "### Languages\n\nThis dataset is purely in English.\n\n\nSome of the responses were generated by ChatGPT.", "### Discussion of Biases\n\nThis dataset intentionally carries gender and job-related biases which reflect ones that exist in society,\nfor the research purposes of examining the effects the biases have on the model. Creators do not support these biases." ]
[ "TAGS\n#region-us \n", "# Dataset Card for Dataset Name", "## Dataset Description\n\n- Homepage: \n- Repository: \n- Paper: \n- Leaderboard: \n- Point of Contact:", "### Dataset Summary\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.", "### Supported Tasks and Leaderboards", "### Languages\n\nThis dataset is purely in English.\n\n\nSome of the responses were generated by ChatGPT.", "### Discussion of Biases\n\nThis dataset intentionally carries gender and job-related biases which reflect ones that exist in society,\nfor the research purposes of examining the effects the biases have on the model. Creators do not support these biases." ]
[ 6, 8, 24, 32, 10, 26, 60 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Dataset Name## Dataset Description\n\n- Homepage: \n- Repository: \n- Paper: \n- Leaderboard: \n- Point of Contact:### Dataset Summary\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.### Supported Tasks and Leaderboards### Languages\n\nThis dataset is purely in English.\n\n\nSome of the responses were generated by ChatGPT.### Discussion of Biases\n\nThis dataset intentionally carries gender and job-related biases which reflect ones that exist in society,\nfor the research purposes of examining the effects the biases have on the model. Creators do not support these biases." ]
46dce924ed8786979556018e191c0f557d8f4aa2
# LongLoRA and LongAlpaca for Long-context LLMs [![Huggingface Models](https://img.shields.io/badge/Models-Huggingface%20Models-bron)](https://huggingface.co/Yukang) [![Github](https://img.shields.io/badge/Github-Repo-cyan)](https://github.com/dvlab-research/LongLoRA) [![Data](https://img.shields.io/badge/Data-LongAlpaca%2012k-light)](https://huggingface.co/datasets/Yukang/LongAlpaca-12k) [![Paper](https://img.shields.io/badge/Paper-Arvix-blue)](https://arxiv.org/abs/2309.12307) [![Code License](https://img.shields.io/badge/Code%20License-Apache_2.0-yellow.svg)](https://github.com/dvlab-research/LongLoRA/blob/main/LICENSE) [![Data License](https://img.shields.io/badge/Data%20License-CC%20By%20NC%204.0-orange.svg)](https://github.com/dvlab-research/LongLoRA/blob/main/DATA_LICENSE) [![Weight License](https://img.shields.io/badge/Weight%20License-CC%20By%20NC%204.0-red)](https://github.com/dvlab-research/LongLoRA/blob/main/WEIGHT_LICENSE) For detailed usage and codes, please visit the [Github project](https://github.com/dvlab-research/LongLoRA). ## TABLE OF CONTENTS 1. [News](#news) 2. [Examples](#examples) 3. [Highlights](#highlights) 4. [How to contribute](#how-to-contribute) 5. [Requirements](#usage-requirements) 6. [Installation and quick guide](#installation-and-quick-guide) 7. [LongAlpaca Data](#longalpaca-data) 8. [Models](#models) 9. [Training](#training) 10. [Evaluation](#evaluation) 11. [Demo](#demo) 12. [Data Generation via Pdf2Text](#data-generation-via-pdf2text) 13. [Citation](#citation) 14. [Acknowledgement](#acknowledgement) 15. [License](#license) ## News - [x] [2023.10.8] **We release the long instruction-following dataset**, [LongAlpaca-12k](https://huggingface.co/datasets/Yukang/LongAlpaca-12k) and **the corresponding models**, [LongAlpaca-7B](https://huggingface.co/Yukang/LongAlpaca-7B), [LongAlpaca-13B](https://huggingface.co/Yukang/LongAlpaca-13B), and [LongAlpaca-70B](https://huggingface.co/Yukang/LongAlpaca-70B). - (*The previous sft models*, [Llama-2-13b-chat-longlora-32k-sft](https://huggingface.co/Yukang/Llama-2-13b-chat-longlora-32k-sft) and [Llama-2-70b-chat-longlora-32k-sft](https://huggingface.co/Yukang/Llama-2-70b-chat-longlora-32k-sft), *have been deprecated*.) - [x] [2023.10.3] We add support for GPTNeoX models. Please refer to this [PR](https://github.com/dvlab-research/LongLoRA/pull/32) for usage. Thanks to @naubull2 for this contribution. - [x] [2023.9.22] We release all our fine-tuned [models](https://huggingface.co/Yukang), including **70B-32k models**, [LLaMA2-LongLoRA-70B-32k](https://huggingface.co/Yukang/Llama-2-70b-longlora-32k), [LLaMA2-LongLoRA-7B-100k](https://huggingface.co/Yukang/Llama-2-7b-longlora-100k-ft). Welcome to check them out! - [x] [2023.9.22] We release [Paper](http://arxiv.org/abs/2309.12307) and this GitHub repo, including training and evaluation code. **LongLoRA: Efficient Fine-tuning of Long-Context Large Language Models [[Paper](http://arxiv.org/abs/2309.12307)]** <br /> [Yukang Chen](https://scholar.google.com/citations?user=6p0ygKUAAAAJ&hl=en), [Shengju Qian](https://scholar.google.com/citations?user=QNnWmasAAAAJ), [Haotian Tang](https://scholar.google.com/citations?user=WxL13BAAAAAJ&hl), [Xin Lai](https://scholar.google.com/citations?user=tqNDPA4AAAAJ&hl=zh-CN), [Zhijian Liu](https://scholar.google.com/citations?user=3coYSTUAAAAJ&hl=en), [Song Han](https://scholar.google.com/citations?user=E0iCaa4AAAAJ&hl=zh-CN), [Jiaya Jia](https://scholar.google.com/citations?user=XPAkzTEAAAAJ&hl=en)<br /> ## Highlights 1. 
In the LongLoRA approach, the proposed shifted short attention is easy to implement, compatible with Flash-Attention, and is not required during inference. 2. We released all our models, including models from 7B to 70B, context lengths from 8k to 100k, including [LLaMA2-LongLoRA-7B-100k](https://huggingface.co/Yukang/Llama-2-7b-longlora-100k-ft), [LLaMA2-LongLoRA-13B-64k](https://huggingface.co/Yukang/Llama-2-13b-longlora-64k), and [LLaMA2-LongLoRA-70B-32k](https://huggingface.co/Yukang/Llama-2-70b-longlora-32k). 3. We built up a long-context instruction-following dataset, [LongAlpaca-12k](#longalpaca-data). We released the corresponding [LongAlpaca-7B](https://huggingface.co/Yukang/LongAlpaca-7B), [LongAlpaca-13B](https://huggingface.co/Yukang/LongAlpaca-13B) and [LongAlpaca-70B](https://huggingface.co/Yukang/LongAlpaca-70B) models. To the best of our knowledge, this is the first open-sourced long-context 70B model. ## How to Contribute - Make sure to have git installed. - Create your own [fork](https://github.com/dvlab-research/LongLoRA/fork) of the project. - Clone the repository on your local machine, using git clone and pasting the url of this project. - Read both the `Requirements` and `Installation and Quick Guide` sections below. - Commit and push your changes. - Make a pull request when finished modifying the project. ## Usage Requirements To download and use the [pre-trained weights](#pre-trained-weights) you will need: 1. A Hugging Face (HF) account with a valid email. Note that the email used for HF must also be used for the license agreement. 2. Accept the Meta [license and acceptable use policy](https://ai.meta.com/resources/models-and-libraries/llama-downloads/) ## Installation and Quick Guide To install and run the application: 1. [Fork this repo](https://github.com/dvlab-research/LongLoRA/fork) on GitHub 2. Clone the repository on your local machine, using git clone and pasting the url of this project. 3. Run the following code: ``` pip install -r requirements.txt pip install flash-attn --no-build-isolation ``` 4. Use either a [Released model](#released-models) or [Fine tune](#fine-tuning) a model to fit your preferences. 5. Test your model by chatting with it. 6. Deploy your own demo. ## LongAlpaca Data LongAlpaca-12k contains 9k long QA entries that we collected and 3k short QA entries sampled from the original [Alpaca data](https://github.com/tatsu-lab/stanford_alpaca/blob/main/alpaca_data.json). This is to avoid the model degrading at short instruction following. The data we collected covers various types and amounts, as shown in the following table. | Data | Short QA | Long QA | Total | Download | |:---------------|----------|----------|----------|----------| | LongAlpaca-12k | 3k | 9k | 12k | [Link](https://huggingface.co/datasets/Yukang/LongAlpaca-12k) | Following the original Alpaca format, our Long QA data uses the following prompts for fine-tuning: - `instruction`: `str`, describes the task the model should perform. For example, to answer a question after reading a book section or paper. We vary the contents and questions to make instructions diverse. - `output`: `str`, the answer to the instruction. We did not use the `input` format in the Alpaca format for simplicity, as illustrated in the sketch below. 
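To make the record format concrete, here is a minimal sketch of a LongAlpaca-style entry; only the two key names (`instruction`, `output`) come from the description above, and the field contents are invented for illustration:

```python
import json

# Hypothetical record in the LongAlpaca format described above; the values
# are made up, only the key names are specified by the dataset.
record = {
    "instruction": (
        "Below is a paper. Read the paper and answer the question.\n"
        "<long paper text ...>\n"
        "Question: What are the main contributions of this work?"
    ),
    "output": "The paper's main contributions are ...",
}
print(json.dumps(record, indent=2))
```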
## Models ### Models with supervised fine-tuning | Model | Size | Context | Train | Link | |:---------------|------|---------|---------|-----------------------------------------------------------------------------------------------------------------------| | LongAlpaca-7B | 7B | 32768 | Full FT | [Model](https://huggingface.co/Yukang/LongAlpaca-7B) | | LongAlpaca-13B | 13B | 32768 | Full FT | [Model](https://huggingface.co/Yukang/LongAlpaca-13B) | | LongAlpaca-70B | 70B | 32768 | LoRA+ | [Model](https://huggingface.co/Yukang/LongAlpaca-70B) [(LoRA-weight)](https://huggingface.co/Yukang/LongAlpaca-70B-lora) | ### Models with context extension via fully fine-tuning | Model | Size | Context | Train | Link | |:----------------------------|------|---------|-------|-------------------------------------------------------------------| | Llama-2-7b-longlora-8k-ft | 7B | 8192 | Full FT | [Model](https://huggingface.co/Yukang/Llama-2-7b-longlora-8k-ft) | | Llama-2-7b-longlora-16k-ft | 7B | 16384 | Full FT | [Model](https://huggingface.co/Yukang/Llama-2-7b-longlora-16k-ft) | | Llama-2-7b-longlora-32k-ft | 7B | 32768 | Full FT | [Model](https://huggingface.co/Yukang/Llama-2-7b-longlora-32k-ft) | | Llama-2-7b-longlora-100k-ft | 7B | 100000 | Full FT | [Model](https://huggingface.co/Yukang/Llama-2-7b-longlora-100k-ft) | | Llama-2-13b-longlora-8k-ft | 13B | 8192 | Full FT | [Model](https://huggingface.co/Yukang/Llama-2-13b-longlora-8k-ft) | | Llama-2-13b-longlora-16k-ft | 13B | 16384 | Full FT | [Model](https://huggingface.co/Yukang/Llama-2-13b-longlora-16k-ft) | | Llama-2-13b-longlora-32k-ft | 13B | 32768 | Full FT | [Model](https://huggingface.co/Yukang/Llama-2-13b-longlora-32k-ft) | ### Models with context extension via improved LoRA fine-tuning | Model | Size | Context | Train | Link | |:----------------------------|------|---------|-------|---------------------------------------------------------------------| | Llama-2-7b-longlora-8k | 7B | 8192 | LoRA+ | [LoRA-weight](https://huggingface.co/Yukang/Llama-2-7b-longlora-8k) | | Llama-2-7b-longlora-16k | 7B | 16384 | LoRA+ | [LoRA-weight](https://huggingface.co/Yukang/Llama-2-7b-longlora-16k) | | Llama-2-7b-longlora-32k | 7B | 32768 | LoRA+ | [LoRA-weight](https://huggingface.co/Yukang/Llama-2-7b-longlora-32k) | | Llama-2-13b-longlora-8k | 13B | 8192 | LoRA+ | [LoRA-weight](https://huggingface.co/Yukang/Llama-2-13b-longlora-8k) | | Llama-2-13b-longlora-16k | 13B | 16384 | LoRA+ | [LoRA-weight](https://huggingface.co/Yukang/Llama-2-13b-longlora-16k) | | Llama-2-13b-longlora-32k | 13B | 32768 | LoRA+ | [LoRA-weight](https://huggingface.co/Yukang/Llama-2-13b-longlora-32k) | | Llama-2-13b-longlora-64k | 13B | 65536 | LoRA+ | [LoRA-weight](https://huggingface.co/Yukang/Llama-2-13b-longlora-64k) | | Llama-2-70b-longlora-32k | 70B | 32768 | LoRA+ | [LoRA-weight](https://huggingface.co/Yukang/Llama-2-70b-longlora-32k) | | Llama-2-70b-chat-longlora-32k | 70B | 32768 | LoRA+ | [LoRA-weight](https://huggingface.co/Yukang/Llama-2-70b-chat-longlora-32k) | ## Training ### Pre-trained weights We use LLaMA2 models as the pre-trained weights and fine-tune them to long context window sizes. Download based on your choices. 
| Pre-trained weights | |:-------------------------------------------------------------------------------------| | [Llama-2-7b-hf](https://huggingface.co/meta-llama/Llama-2-7b-hf) | | [Llama-2-13b-hf](https://huggingface.co/meta-llama/Llama-2-13b-hf) | | [Llama-2-70b-hf](https://huggingface.co/meta-llama/Llama-2-70b-hf) | | [Llama-2-7b-chat-hf](https://huggingface.co/meta-llama/Llama-2-7b-chat-hf) | | [Llama-2-13b-chat-hf](https://huggingface.co/meta-llama/Llama-2-13b-chat-hf) | | [Llama-2-70b-chat-hf](https://huggingface.co/meta-llama/Llama-2-70b-chat-hf) | This project also supports GPTNeoX models as the base model architecture. Some candidate pre-trained weights may include [GPT-NeoX-20B](https://huggingface.co/EleutherAI/gpt-neox-20b), [Polyglot-ko-12.8B](https://huggingface.co/EleutherAI/polyglot-ko-12.8b) and other variants. ### Fine-tuning ``` torchrun --nproc_per_node=8 fine-tune.py \ --model_name_or_path path_to/Llama-2-7b-hf \ --bf16 True \ --output_dir path_to_saving_checkpoints \ --cache_dir path_to_cache \ --model_max_length 8192 \ --use_flash_attn True \ --low_rank_training False \ --num_train_epochs 1 \ --per_device_train_batch_size 1 \ --per_device_eval_batch_size 2 \ --gradient_accumulation_steps 8 \ --evaluation_strategy "no" \ --save_strategy "steps" \ --save_steps 1000 \ --save_total_limit 2 \ --learning_rate 2e-5 \ --weight_decay 0.0 \ --warmup_steps 20 \ --lr_scheduler_type "constant_with_warmup" \ --logging_steps 1 \ --deepspeed "ds_configs/stage2.json" \ --tf32 True \ --max_steps 1000 ``` - Please remember to change `path_to/Llama-2-7b-hf`, `path_to_saving_checkpoints`, `path_to_cache` to your own directory. - Note that you can change `model_max_length` to other values. - You could change `ds_configs/stage2.json` to `ds_configs/stage3.json` if you want. - Please set `use_flash_attn` as `False` if you use V100 machines or do not install flash attention. - You can set `low_rank_training` as `False` if you want to use full fine-tuning. It will cost more GPU memory and be slower, but the performance will be a bit better. - When training is finished, to get the full model weight: ``` cd path_to_saving_checkpoints && python zero_to_fp32.py . pytorch_model.bin ``` ### Supervised Fine-tuning ``` torchrun --nproc_per_node=8 supervised-fine-tune.py \ --model_name_or_path path_to_Llama2_chat_models \ --bf16 True \ --output_dir path_to_saving_checkpoints \ --model_max_length 32768 \ --use_flash_attn True \ --data_path LongAlpaca-12k.json \ --low_rank_training True \ --num_train_epochs 3 \ --per_device_train_batch_size 1 \ --per_device_eval_batch_size 2 \ --gradient_accumulation_steps 1 \ --evaluation_strategy "no" \ --save_strategy "steps" \ --save_steps 1000 \ --save_total_limit 2 \ --learning_rate 2e-5 \ --weight_decay 0.0 \ --warmup_steps 20 \ --lr_scheduler_type "constant_with_warmup" \ --logging_steps 1 \ --deepspeed "ds_configs/stage2.json" \ --tf32 True ``` - There is no need to perform supervised fine-tuning on top of the fine-tuned context-extended models. It is fine to directly use a base model such as the Llama2-chat models, as the amount of long instruction-following data is enough for SFT. - Our long instruction-following data can be found in [LongAlpaca-12k.json](https://huggingface.co/datasets/Yukang/LongAlpaca-12k). ### Get trainable weights in low-rank training In low-rank training, we set embedding and normalization layers as trainable. 
Please use the following line to extract the trainable weights `trainable_params.bin` from `pytorch_model.bin`: ``` python3 get_trainable_weights.py --checkpoint_path path_to_saving_checkpoints --trainable_params "embed,norm" ``` ### Merge LoRA Weight Merge the LoRA weights of `pytorch_model.bin` and trainable parameters `trainable_params.bin`, and save the resulting model into your desired path in the Hugging Face format: ``` python3 merge_lora_weights_and_save_hf_model.py \ --base_model path_to/Llama-2-7b-hf \ --peft_model path_to_saving_checkpoints \ --context_size 8192 \ --save_path path_to_saving_merged_model ``` For example, ``` python3 merge_lora_weights_and_save_hf_model.py \ --base_model /dataset/pretrained-models/Llama-2-7b-hf \ --peft_model /dataset/yukangchen/hf_models/lora-models/Llama-2-7b-longlora-8k \ --context_size 8192 \ --save_path /dataset/yukangchen/models/Llama-2-7b-longlora-8k-merged ``` ## Evaluation ### Perplexity Validation To evaluate a model that is trained in the low-rank setting, please set both `base_model` and `peft_model`. `base_model` is the pre-trained weight. `peft_model` is the path to the saved checkpoint, which should contain `trainable_params.bin`, `adapter_model.bin` and `adapter_config.json`. For example, ``` python3 eval.py --seq_len 8192 --context_size 8192 --batch_size 1 --base_model path_to/Llama-2-7b-hf --peft_model path_to_saving_checkpoints --data_path pg19/test.bin ``` To evaluate a model that is fully fine-tuned, you only need to set `base_model` as the path to the saved checkpoint, which should contain `pytorch_model.bin` and `config.json`. `peft_model` should be ignored. ``` python3 eval.py --seq_len 8192 --context_size 8192 --batch_size 1 --base_model path_to_saving_checkpoints --data_path pg19/test.bin ``` - Note that `--seq_len` is to set the sequence length for evaluation. `--context_size` is to set the context length of the model during fine-tuning. `--seq_len` should not be larger than `--context_size`. - We have already tokenized the validation and test splits of the PG19 and proof-pile datasets into `pg19/validation.bin`, `pg19/test.bin`, and `proof-pile/test_sampled_data.bin`, with the tokenizer of LLaMA. `proof-pile/test_sampled_data.bin` contains 128 documents that are randomly sampled from the total proof-pile test split. Each document has at least 32768 tokens. We also release the sampled ids in [proof-pile/test_sampled_ids.bin](https://drive.google.com/file/d/1cnzWODLRQYAd7HeugzLCIhaqzaLZv7J5/view?usp=share_link). You can download them from the links below. | Dataset | Split | Link | |:-----------|------------|--------------------------------------------------------------------------------------------------------------| | PG19 | validation | [pg19/validation.bin](https://drive.google.com/file/d/1rbJvb0qRIf2mQoN2ON7S93TbTzMnlrN6/view?usp=share_link) | | PG19 | test | [pg19/test.bin](https://drive.google.com/file/d/1QANDMdctpacPAYgS04adDXqByGEq-Ret/view?usp=share_link) | | Proof-pile | test | [proof-pile/test_sampled_data.bin](https://drive.google.com/file/d/1bUI5lPDvrqzY_XXJJ2sSuvZx0Y9AZClE/view?usp=share_link) | ### Passkey Retrieval We provide a way to test the passkey retrieval accuracy. For example, ``` python3 passkey_retrivial.py \ --context_size 32768 \ --base_model path_to/Llama-2-7b-longlora-32k \ --max_tokens 32768 \ --interval 1000 ``` - Note that the `context_size` is the context length during fine-tuning. - `max_tokens` is the maximum length for the document in passkey retrieval evaluation. 
- `interval` is the step size by which the document length increases. It is approximate because the document grows sentence by sentence. ## Demo ### Local Inference To chat with [Llama-2-13b-chat-longlora-32k-sft](https://huggingface.co/Yukang/Llama-2-13b-chat-longlora-32k-sft) or [Llama-2-70b-chat-longlora-32k-sft](https://huggingface.co/Yukang/Llama-2-70b-chat-longlora-32k-sft), you need to run `merge_lora_weights_and_save_hf_model.py` first, and then: ``` python3 inference.py \ --base_model path_to_model \ --question $question \ --context_size $context_length \ --max_gen_len $max_gen_len \ --flash_attn True \ --material $material_content \ --material_type $material_type \ --material_title $material_title ``` To ask a question related to a book: ``` python3 inference.py \ --base_model /data/models/Llama-2-13b-chat-longlora-32k-sft \ --question "Why doesn't Professor Snape seem to like Harry?" \ --context_size 32768 \ --max_gen_len 512 \ --flash_attn True \ --material "materials/Harry Potter and the Philosophers Stone_section2.txt" \ --material_type "book" \ --material_title "Harry Potter and the Philosophers Stone" ``` Note that you can ignore `material_type` or `material_title`. To ask a question related to a paper: ``` python3 inference.py \ --base_model /data/models/Llama-2-13b-chat-longlora-32k-sft \ --question "What are the main contributions and novelties of this work?" \ --context_size 32768 \ --max_gen_len 512 \ --flash_attn True \ --material "materials/paper1.txt" \ --material_type "paper" ``` ### Online Demo To deploy your own demo, run ``` python3 demo.py \ --base_model path_to_model \ --context_size $context_size \ --max_gen_len $max_gen_len \ --flash_attn True ``` Example ``` python3 demo.py \ --base_model /data/models/Llama-2-13b-chat-longlora-32k-sft \ --context_size 32768 \ --max_gen_len 512 \ --flash_attn True ``` - Note that `flash_attn=True` will make the generation slow but save much GPU memory. ## Data Generation via Pdf2text During our dataset collection, we convert papers and books from PDF to text. The conversion quality has a large influence on the final model quality. We think that this step is non-trivial. We release the tool for the pdf2txt conversion, in the folder `pdf2txt`. It is built upon `pdf2image`, `easyocr`, `ditod` and `detectron2`. Please refer to the [README.md](pdf2txt/README.md) in `pdf2txt` for more details. ## Citation If you find this project useful in your research, please consider citing: ``` @article{longlora, title={LongLoRA: Efficient Fine-tuning of Long-Context Large Language Models}, author={Yukang Chen and Shengju Qian and Haotian Tang and Xin Lai and Zhijian Liu and Song Han and Jiaya Jia}, journal={arXiv:2309.12307}, year={2023} } ``` ``` @misc{long-alpaca, author = {Yukang Chen and Shaozuo Yu and Shengju Qian and Haotian Tang and Xin Lai and Zhijian Liu and Song Han and Jiaya Jia}, title = {Long Alpaca: Long-context Instruction-following models}, year = {2023}, publisher = {GitHub}, journal = {GitHub repository}, howpublished = {\url{https://github.com/dvlab-research/LongLoRA}}, } ``` ## Acknowledgement - This work is built upon [LLaMA2](https://ai.meta.com/llama) as the pre-trained models. - This work can also be built upon [GPTNeoX-HF](https://huggingface.co/docs/transformers/model_doc/gpt_neox), which is based upon [EleutherAI/GPTNeoX](https://github.com/EleutherAI/gpt-neox) as the pre-trained model architecture. 
- This work is based on [DeepSpeed](https://github.com/microsoft/DeepSpeed), [peft](https://github.com/huggingface/peft), and [Flash-Attention2](https://github.com/Dao-AILab/flash-attention) for acceleration. - Some evaluation code is adapted from [Landmark Attention](https://github.com/epfml/landmark-attention). - We use [LongChat](https://github.com/DachengLi1/LongChat) for the retrieval evaluation. ## License - LongLoRA is licensed under the Apache License 2.0. This means that it requires the preservation of copyright and license notices. - Data and weights are under CC-BY-NC 4.0 License. They are licensed for research use only and allow only non-commercial use. Models trained using the dataset should not be used outside of research purposes.
Yukang/LongAlpaca-12k
[ "arxiv:2309.12307", "region:us" ]
2023-10-09T02:21:25+00:00
{}
2023-10-11T03:03:27+00:00
[ "2309.12307" ]
[]
TAGS #arxiv-2309.12307 #region-us
LongLoRA and LongAlpaca for Long-context LLMs ============================================= ![Huggingface Models](URL ![Github](URL ![Data](URL ![Paper](URL ![Code License](URL ![Data License](URL ![Weight License](URL For detailed usage and codes, please visit the Github project. TABLE OF CONTENTS ----------------- 1. News 2. Examples 3. Highlights 4. How to contribute 5. Requirements 6. Installation and quick guide 7. LongAlpaca Data 8. Models 9. Training 10. Evaluation 11. Demo 12. Data Generation via Pdf2Text 13. Citation 14. Acknowledgement 15. License News ---- * [x] [2023.10.8] We release the long instruction-following dataset, LongAlpaca-12k and the corresponding models, LongAlpaca-7B, LongAlpaca-13B, and LongAlpaca-70B. * (*The previous sft models*, Llama-2-13b-chat-longlora-32k-sft and Llama-2-70b-chat-longlora-32k-sft, *have been deprecated*.) * [x] [2023.10.3] We add support for GPTNeoX models. Please refer to this PR for usage. Thanks to @naubull2 for this contribution. * [x] [2023.9.22] We release all our fine-tuned models, including 70B-32k models, LLaMA2-LongLoRA-70B-32k, LLaMA2-LongLoRA-7B-100k. Welcome to check them out! * [x] [2023.9.22] We release Paper and this GitHub repo, including training and evaluation code. LongLoRA: Efficient Fine-tuning of Long-Context Large Language Models [Paper] Yukang Chen, Shengju Qian, Haotian Tang, Xin Lai, Zhijian Liu, Song Han, Jiaya Jia Highlights ---------- 1. In the LongLoRA approach, the proposed shifted short attention is easy to implement, compatible with Flash-Attention, and is not required during inference. 2. We released all our models, including models from 7B to 70B, context lengths from 8k to 100k, including LLaMA2-LongLoRA-7B-100k, LLaMA2-LongLoRA-13B-64k, and LLaMA2-LongLoRA-70B-32k. 3. We built up a long-context instruction-following dataset, LongAlpaca-12k. We released the corresponding LongAlpaca-7B, LongAlpaca-13B and LongAlpaca-70B models. To the best of our knowledge, this is the first open-sourced long-context 70B model. How to Contribute ----------------- * Make sure to have git installed. * Create your own fork of the project. * Clone the repository on your local machine, using git clone and pasting the url of this project. * Read both the 'Requirements' and 'Installation and Quick Guide' sections below. * Commit and push your changes. * Make a pull request when finished modifying the project. Usage Requirements ------------------ To download and use the pre-trained weights you will need: 1. A Hugging Face (HF) account with a valid email. Note that the email used for HF must also be used for the license agreement. 2. Accept the Meta license and acceptable use policy Installation and Quick Guide ---------------------------- To install and run the application: 1. Fork this repo on GitHub 2. Clone the repository on your local machine, using git clone and pasting the url of this project. 3. Run the following code: 4. Use either a Released model or Fine tune a model to fit your preferences. 5. Test your model by chatting with it. 6. Deploy your own demo. LongAlpaca Data --------------- LongAlpaca-12k contains 9k long QA entries that we collected and 3k short QA entries sampled from the original Alpaca data. This is to avoid the model degrading at short instruction following. The data we collected covers various types and amounts. Following the original Alpaca format, our Long QA data uses the following prompts for fine-tuning: * 'instruction': 'str', describes the task the model should perform. 
For example, to answer a question after reading a book section or paper. We vary the contents and questions to make instructions diverse. * 'output': 'str', the answer to the instruction. We did not use the 'input' format in the Alpaca format for simplicity. Models ------ ### Models with supervised fine-tuning ### Models with context extension via fully fine-tuning ### Models with context extension via improved LoRA fine-tuning Training -------- ### Pre-trained weights We use LLaMA2 models as the pre-trained weights and fine-tune them to long context window sizes. Download based on your choices. This project also supports GPTNeoX models as the base model architecture. Some candidate pre-trained weights may include GPT-NeoX-20B, Polyglot-ko-12.8B and other variants. ### Fine-tuning * Please remember to change 'path\_to/Llama-2-7b-hf', 'path\_to\_saving\_checkpoints', 'path\_to\_cache' to your own directory. * Note that you can change 'model\_max\_length' to other values. * You could change 'ds\_configs/URL' to 'ds\_configs/URL' if you want. * Please set 'use\_flash\_attn' as 'False' if you use V100 machines or do not install flash attention. * You can set 'low\_rank\_training' as 'False' if you want to use full fine-tuning. It will cost more GPU memory and be slower, but the performance will be a bit better. * When training is finished, to get the full model weight: ### Supervised Fine-tuning * There is no need to perform supervised fine-tuning on top of the fine-tuned context-extended models. It is fine to directly use a base model such as the Llama2-chat models, as the amount of long instruction-following data is enough for SFT. * Our long instruction-following data can be found in URL. ### Get trainable weights in low-rank training In low-rank training, we set embedding and normalization layers as trainable. Please use the following line to extract the trainable weights 'trainable\_params.bin' from 'pytorch\_model.bin' ### Merge LoRA Weight Merge the LoRA weights of 'pytorch\_model.bin' and trainable parameters 'trainable\_params.bin', and save the resulting model into your desired path in the Hugging Face format: For example, Evaluation ---------- ### Perplexity Validation To evaluate a model that is trained in the low-rank setting, please set both 'base\_model' and 'peft\_model'. 'base\_model' is the pre-trained weight. 'peft\_model' is the path to the saved checkpoint, which should contain 'trainable\_params.bin', 'adapter\_model.bin' and 'adapter\_config.json'. For example, To evaluate a model that is fully fine-tuned, you only need to set 'base\_model' as the path to the saved checkpoint, which should contain 'pytorch\_model.bin' and 'URL'. 'peft\_model' should be ignored. * Note that '--seq\_len' is to set the sequence length for evaluation. '--context\_size' is to set the context length of the model during fine-tuning. '--seq\_len' should not be larger than '--context\_size'. * We have already tokenized the validation and test splits of the PG19 and proof-pile datasets into 'pg19/URL', 'pg19/URL', and 'proof-pile/test\_sampled\_data.bin', with the tokenizer of LLaMA. 'proof-pile/test\_sampled\_data.bin' contains 128 documents that are randomly sampled from the total proof-pile test split. Each document has at least 32768 tokens. We also release the sampled ids in proof-pile/test\_sampled\_ids.bin. You can download them from the links below. ### Passkey Retrieval We provide a way to test the passkey retrieval accuracy. 
For example, * Note that the 'context\_size' is the context length during fine-tuning. * 'max\_tokens' is the maximum length for the document in passkey retrieval evaluation. * 'interval' is the step size by which the document length increases. It is approximate because the document grows sentence by sentence. Demo ---- ### Local Inference To chat with Llama-2-13b-chat-longlora-32k-sft or Llama-2-70b-chat-longlora-32k-sft, you need to run 'merge\_lora\_weights\_and\_save\_hf\_model.py' first, and then: To ask a question related to a book: Note that you can ignore 'material\_type' or 'material\_title'. To ask a question related to a paper: ### Online Demo To deploy your own demo, run Example * Note that 'flash\_attn=True' will make the generation slow but save much GPU memory. Data Generation via Pdf2text ---------------------------- During our dataset collection, we convert papers and books from PDF to text. The conversion quality has a large influence on the final model quality. We think that this step is non-trivial. We release the tool for the pdf2txt conversion, in the folder 'pdf2txt'. It is built upon 'pdf2image', 'easyocr', 'ditod' and 'detectron2'. Please refer to the URL in 'pdf2txt' for more details. If you find this project useful in your research, please consider citing: Acknowledgement --------------- * This work is built upon LLaMA2 as the pre-trained models. * This work can also be built upon GPTNeoX-HF, which is based upon EleutherAI/GPTNeoX as the pre-trained model architecture. * This work is based on DeepSpeed, peft, and Flash-Attention2 for acceleration. * Some evaluation code is adapted from Landmark Attention. * We use LongChat for the retrieval evaluation. License ------- * LongLoRA is licensed under the Apache License 2.0. This means that it requires the preservation of copyright and license notices. * Data and weights are under CC-BY-NC 4.0 License. They are licensed for research use only and allow only non-commercial use. Models trained using the dataset should not be used outside of research purposes.
[ "### Models with supervised fine-tuning", "### Models with context extension via fully fine-tuning", "### Models with context extension via improved LoRA fine-tuning\n\n\n\nTraining\n--------", "### Pre-trained weights\n\n\nWe use LLaMA2 models as the pre-trained weights and fine-tune them to long context window sizes. Download based on your choices.\n\n\n\nThis project also supports GPTNeoX models as the base model architecture. Some candidate pre-trained weights may include GPT-NeoX-20B, Polyglot-ko-12.8B and other variants.", "### Fine-tuning\n\n\n* Please remember to change 'path\\_to/Llama-2-7b-hf', 'path\\_to\\_saving\\_checkpoints', 'path\\_to\\_cache' to your own directory.\n* Note that you can change 'model\\_max\\_length' to other values.\n* You could change 'ds\\_configs/URL' to 'ds\\_configs/URL' if you want.\n* Please set 'use\\_flash\\_attn' as 'False' if you use V100 machines or do not install flash attention.\n* You can set 'low\\_rank\\_training' as 'False' if you want to use fully fine-tuning. It will cost more GPU memory and slower, but the performance will be a bit better.\n* When training is finished, to get the full model weight:", "### Supervised Fine-tuning\n\n\n* There is no need to make supervised fine-tuning upon the fine-tuned context extended models. It is all right to directly use base model as Llama2-chat models, as the amount of long instruction following data is enough for SFT.\n* Our long instruction following data can be found in URL.", "### Get trainable weights in low-rank training\n\n\nIn low-rank training, we set embedding and normalization layers as trainable. Please use the following line to extract the trainable weights 'trainable\\_params.bin' from 'pytorch\\_model.bin'", "### Merge LoRA Weight\n\n\nMerge the LoRA weights of 'pytorch\\_model.bin' and trainable parameters 'trainable\\_params.bin', save the resulting model into your desired path in the Hugging Face format:\n\n\nFor example,\n\n\nEvaluation\n----------", "### Perplexity Validation\n\n\nTo evaluate a model that is trained in the low-rank setting, please set both 'base\\_model' and 'peft\\_model'. 'base\\_model' is the pre-trained weight. 'peft\\_model' is the path to the saved checkpoint, which should contain 'trainable\\_params.bin', 'adapter\\_model.bin' and 'adapter\\_config.json'. For example,\n\n\nTo evaluate a model that is fully fine-tuned, you only need to set 'base\\_model' as the path to the saved checkpoint, which should contain 'pytorch\\_model.bin' and 'URL'. 'peft\\_model' should be ignored.\n\n\n* Note that '--seq\\_len' is to set the sequence length for evaluation. '--context\\_size' is to set the context length of the model during fine-tuning. '--seq\\_len' should not be larger than '--context\\_size'.\n* We have already tokenized the validation and test splits of PG19 and proof-pile dataset into 'pg19/URL', 'pg19/URL', and 'proof-pile/test\\_sampled\\_data.bin', with the tokenizer of LLaMA. 'proof-pile/test\\_sampled\\_data.bin' contains 128 documents that are randomly sampled from the total proof-pile test split. For each document, it has at least 32768 tokens. We also release the sampled ids in proof-pile/test\\_sampled\\_ids.bin. You can download them from the links below.", "### Passkey Retrieval\n\n\nWe provide a manner to test the passkey retrieval accuracy. 
For example,\n\n\n* Note that the 'context\\_size' is the context length during fine-tuning.\n* 'max\\_tokens' is maximum length for the document in passkey retrieval evaluation.\n* 'interval' is the interval during the document length increasing. It is a rough number because the document increases by sentences.\n\n\nDemo\n----", "### Local Inference\n\n\nTo chat with Llama-2-13b-chat-longlora-32k-sft or Llama-2-70b-chat-longlora-32k-sft, you need to run 'merge\\_lora\\_weights\\_and\\_save\\_hf\\_model.py' first, and then:\n\n\nTo ask a question related to a book:\n\n\nNote that you can ignore 'material\\_type' or 'material\\_title'.\n\n\nTo ask a question related to a paper:", "### Online Demo\n\n\nTo deploy your own demo run\n\n\nExample\n\n\n* Note that 'flash\\_attn=True' will make the generation slow but save much GPU memory.\n\n\nData Generation via Pdf2text\n----------------------------\n\n\nDuring our dataset collection, we convert paper and books from pdf to text. The conversion quality has a large influence on the final model quality. We think that this step is non-trivial. We release the tool for the pdf2txt conversion, in the folder 'pdf2txt'. It is built upon 'pdf2image', 'easyocr', 'ditod' and 'detectron2'. Please refer to the URL in 'pdf2txt' for more details.\n\n\nIf you find this project useful in your research, please consider citing:\n\n\nAcknowledgement\n---------------\n\n\n* This work is built upon the LLaMA2 as the pre-trained models.\n* This work can also be built upon the GPTNeoX-HF which is based upon EleutherAI/GPTNeoX as the pre-trained model architecture.\n* This work is based on DeepSpeed, peft, and Flash-Attention2 for acceleration.\n* Some evaluation code is modified upon Landmark Attention.\n* We use LongChat for the retrieval evaluation.\n\n\nLicense\n-------\n\n\n* LongLoRA is licensed under the Apache License 2.0. This means that it requires the preservation of copyright and license notices.\n* Data and weights are under CC-BY-NC 4.0 License. They are licensed for research use only, and allowed only non-commercial. Models trained using the dataset should not be used outside of research purposes." ]
[ "TAGS\n#arxiv-2309.12307 #region-us \n", "### Models with supervised fine-tuning", "### Models with context extension via fully fine-tuning", "### Models with context extension via improved LoRA fine-tuning\n\n\n\nTraining\n--------", "### Pre-trained weights\n\n\nWe use LLaMA2 models as the pre-trained weights and fine-tune them to long context window sizes. Download based on your choices.\n\n\n\nThis project also supports GPTNeoX models as the base model architecture. Some candidate pre-trained weights may include GPT-NeoX-20B, Polyglot-ko-12.8B and other variants.", "### Fine-tuning\n\n\n* Please remember to change 'path\\_to/Llama-2-7b-hf', 'path\\_to\\_saving\\_checkpoints', 'path\\_to\\_cache' to your own directory.\n* Note that you can change 'model\\_max\\_length' to other values.\n* You could change 'ds\\_configs/URL' to 'ds\\_configs/URL' if you want.\n* Please set 'use\\_flash\\_attn' as 'False' if you use V100 machines or do not install flash attention.\n* You can set 'low\\_rank\\_training' as 'False' if you want to use fully fine-tuning. It will cost more GPU memory and slower, but the performance will be a bit better.\n* When training is finished, to get the full model weight:", "### Supervised Fine-tuning\n\n\n* There is no need to make supervised fine-tuning upon the fine-tuned context extended models. It is all right to directly use base model as Llama2-chat models, as the amount of long instruction following data is enough for SFT.\n* Our long instruction following data can be found in URL.", "### Get trainable weights in low-rank training\n\n\nIn low-rank training, we set embedding and normalization layers as trainable. Please use the following line to extract the trainable weights 'trainable\\_params.bin' from 'pytorch\\_model.bin'", "### Merge LoRA Weight\n\n\nMerge the LoRA weights of 'pytorch\\_model.bin' and trainable parameters 'trainable\\_params.bin', save the resulting model into your desired path in the Hugging Face format:\n\n\nFor example,\n\n\nEvaluation\n----------", "### Perplexity Validation\n\n\nTo evaluate a model that is trained in the low-rank setting, please set both 'base\\_model' and 'peft\\_model'. 'base\\_model' is the pre-trained weight. 'peft\\_model' is the path to the saved checkpoint, which should contain 'trainable\\_params.bin', 'adapter\\_model.bin' and 'adapter\\_config.json'. For example,\n\n\nTo evaluate a model that is fully fine-tuned, you only need to set 'base\\_model' as the path to the saved checkpoint, which should contain 'pytorch\\_model.bin' and 'URL'. 'peft\\_model' should be ignored.\n\n\n* Note that '--seq\\_len' is to set the sequence length for evaluation. '--context\\_size' is to set the context length of the model during fine-tuning. '--seq\\_len' should not be larger than '--context\\_size'.\n* We have already tokenized the validation and test splits of PG19 and proof-pile dataset into 'pg19/URL', 'pg19/URL', and 'proof-pile/test\\_sampled\\_data.bin', with the tokenizer of LLaMA. 'proof-pile/test\\_sampled\\_data.bin' contains 128 documents that are randomly sampled from the total proof-pile test split. For each document, it has at least 32768 tokens. We also release the sampled ids in proof-pile/test\\_sampled\\_ids.bin. You can download them from the links below.", "### Passkey Retrieval\n\n\nWe provide a manner to test the passkey retrieval accuracy. 
For example,\n\n\n* Note that the 'context\\_size' is the context length during fine-tuning.\n* 'max\\_tokens' is maximum length for the document in passkey retrieval evaluation.\n* 'interval' is the interval during the document length increasing. It is a rough number because the document increases by sentences.\n\n\nDemo\n----", "### Local Inference\n\n\nTo chat with Llama-2-13b-chat-longlora-32k-sft or Llama-2-70b-chat-longlora-32k-sft, you need to run 'merge\\_lora\\_weights\\_and\\_save\\_hf\\_model.py' first, and then:\n\n\nTo ask a question related to a book:\n\n\nNote that you can ignore 'material\\_type' or 'material\\_title'.\n\n\nTo ask a question related to a paper:", "### Online Demo\n\n\nTo deploy your own demo run\n\n\nExample\n\n\n* Note that 'flash\\_attn=True' will make the generation slow but save much GPU memory.\n\n\nData Generation via Pdf2text\n----------------------------\n\n\nDuring our dataset collection, we convert paper and books from pdf to text. The conversion quality has a large influence on the final model quality. We think that this step is non-trivial. We release the tool for the pdf2txt conversion, in the folder 'pdf2txt'. It is built upon 'pdf2image', 'easyocr', 'ditod' and 'detectron2'. Please refer to the URL in 'pdf2txt' for more details.\n\n\nIf you find this project useful in your research, please consider citing:\n\n\nAcknowledgement\n---------------\n\n\n* This work is built upon the LLaMA2 as the pre-trained models.\n* This work can also be built upon the GPTNeoX-HF which is based upon EleutherAI/GPTNeoX as the pre-trained model architecture.\n* This work is based on DeepSpeed, peft, and Flash-Attention2 for acceleration.\n* Some evaluation code is modified upon Landmark Attention.\n* We use LongChat for the retrieval evaluation.\n\n\nLicense\n-------\n\n\n* LongLoRA is licensed under the Apache License 2.0. This means that it requires the preservation of copyright and license notices.\n* Data and weights are under CC-BY-NC 4.0 License. They are licensed for research use only, and allowed only non-commercial. Models trained using the dataset should not be used outside of research purposes." ]
[ 14, 12, 13, 19, 92, 199, 78, 66, 66, 406, 99, 116, 362 ]
[ "passage: TAGS\n#arxiv-2309.12307 #region-us \n### Models with supervised fine-tuning### Models with context extension via fully fine-tuning### Models with context extension via improved LoRA fine-tuning\n\n\n\nTraining\n--------### Pre-trained weights\n\n\nWe use LLaMA2 models as the pre-trained weights and fine-tune them to long context window sizes. Download based on your choices.\n\n\n\nThis project also supports GPTNeoX models as the base model architecture. Some candidate pre-trained weights may include GPT-NeoX-20B, Polyglot-ko-12.8B and other variants.### Fine-tuning\n\n\n* Please remember to change 'path\\_to/Llama-2-7b-hf', 'path\\_to\\_saving\\_checkpoints', 'path\\_to\\_cache' to your own directory.\n* Note that you can change 'model\\_max\\_length' to other values.\n* You could change 'ds\\_configs/URL' to 'ds\\_configs/URL' if you want.\n* Please set 'use\\_flash\\_attn' as 'False' if you use V100 machines or do not install flash attention.\n* You can set 'low\\_rank\\_training' as 'False' if you want to use fully fine-tuning. It will cost more GPU memory and slower, but the performance will be a bit better.\n* When training is finished, to get the full model weight:### Supervised Fine-tuning\n\n\n* There is no need to make supervised fine-tuning upon the fine-tuned context extended models. It is all right to directly use base model as Llama2-chat models, as the amount of long instruction following data is enough for SFT.\n* Our long instruction following data can be found in URL.### Get trainable weights in low-rank training\n\n\nIn low-rank training, we set embedding and normalization layers as trainable. Please use the following line to extract the trainable weights 'trainable\\_params.bin' from 'pytorch\\_model.bin'", "passage: ### Merge LoRA Weight\n\n\nMerge the LoRA weights of 'pytorch\\_model.bin' and trainable parameters 'trainable\\_params.bin', save the resulting model into your desired path in the Hugging Face format:\n\n\nFor example,\n\n\nEvaluation\n----------### Perplexity Validation\n\n\nTo evaluate a model that is trained in the low-rank setting, please set both 'base\\_model' and 'peft\\_model'. 'base\\_model' is the pre-trained weight. 'peft\\_model' is the path to the saved checkpoint, which should contain 'trainable\\_params.bin', 'adapter\\_model.bin' and 'adapter\\_config.json'. For example,\n\n\nTo evaluate a model that is fully fine-tuned, you only need to set 'base\\_model' as the path to the saved checkpoint, which should contain 'pytorch\\_model.bin' and 'URL'. 'peft\\_model' should be ignored.\n\n\n* Note that '--seq\\_len' is to set the sequence length for evaluation. '--context\\_size' is to set the context length of the model during fine-tuning. '--seq\\_len' should not be larger than '--context\\_size'.\n* We have already tokenized the validation and test splits of PG19 and proof-pile dataset into 'pg19/URL', 'pg19/URL', and 'proof-pile/test\\_sampled\\_data.bin', with the tokenizer of LLaMA. 'proof-pile/test\\_sampled\\_data.bin' contains 128 documents that are randomly sampled from the total proof-pile test split. For each document, it has at least 32768 tokens. We also release the sampled ids in proof-pile/test\\_sampled\\_ids.bin. You can download them from the links below.### Passkey Retrieval\n\n\nWe provide a manner to test the passkey retrieval accuracy. 
For example,\n\n\n* Note that the 'context\\_size' is the context length during fine-tuning.\n* 'max\\_tokens' is maximum length for the document in passkey retrieval evaluation.\n* 'interval' is the interval during the document length increasing. It is a rough number because the document increases by sentences.\n\n\nDemo\n----" ]
5cc7c436cc83d53573213a86fccd0989deb6dc39
# Dataset Card for "COVID-QA-Chunk-64-sentence-transformer-biencoder-data-65_25_10-v2" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
minh21/COVID-QA-Chunk-64-sentence-transformer-biencoder-data-65_25_10-v2
[ "region:us" ]
2023-10-09T02:47:47+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "question", "dtype": "string"}, {"name": "positive", "dtype": "string"}, {"name": "negative", "dtype": "string"}, {"name": "document_id", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 7075815, "num_examples": 6419}, {"name": "test", "num_bytes": 806462, "num_examples": 723}], "download_size": 724649, "dataset_size": 7882277}}
2023-10-09T02:47:50+00:00
[]
[]
TAGS #region-us
# Dataset Card for "COVID-QA-Chunk-64-sentence-transformer-biencoder-data-65_25_10-v2" More Information needed
[ "# Dataset Card for \"COVID-QA-Chunk-64-sentence-transformer-biencoder-data-65_25_10-v2\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"COVID-QA-Chunk-64-sentence-transformer-biencoder-data-65_25_10-v2\"\n\nMore Information needed" ]
[ 6, 40 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"COVID-QA-Chunk-64-sentence-transformer-biencoder-data-65_25_10-v2\"\n\nMore Information needed" ]
2d5dde21e20cc29dd8bd43502dfff84170a82361
# Dataset Card for "COVID-QA-Chunk-64-testset-biencoder-data-65_25_10-v2" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
minh21/COVID-QA-Chunk-64-testset-biencoder-data-65_25_10-v2
[ "region:us" ]
2023-10-09T02:47:51+00:00
{"dataset_info": {"features": [{"name": "question", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "context_chunks", "sequence": "string"}, {"name": "document_id", "dtype": "int64"}, {"name": "id", "dtype": "int64"}, {"name": "context", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 13595044, "num_examples": 203}], "download_size": 0, "dataset_size": 13595044}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-09T10:18:22+00:00
[]
[]
TAGS #region-us
# Dataset Card for "COVID-QA-Chunk-64-testset-biencoder-data-65_25_10-v2" More Information needed
[ "# Dataset Card for \"COVID-QA-Chunk-64-testset-biencoder-data-65_25_10-v2\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"COVID-QA-Chunk-64-testset-biencoder-data-65_25_10-v2\"\n\nMore Information needed" ]
[ 6, 37 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"COVID-QA-Chunk-64-testset-biencoder-data-65_25_10-v2\"\n\nMore Information needed" ]
32a25a8123f8cae0ec59888afd61278938d39949
# Dataset Card for "COVID-QA-Chunk-64-question-answering-biencoder-data-65_25_10-v2" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
minh21/COVID-QA-Chunk-64-question-answering-biencoder-data-65_25_10-v2
[ "region:us" ]
2023-10-09T02:48:22+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}]}], "dataset_info": {"features": [{"name": "question", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "context_chunks", "sequence": "string"}, {"name": "document_id", "dtype": "int64"}, {"name": "id", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 50185273, "num_examples": 1176}, {"name": "validation", "num_bytes": 4744842, "num_examples": 134}], "download_size": 13948442, "dataset_size": 54930115}}
2023-10-09T02:48:25+00:00
[]
[]
TAGS #region-us
# Dataset Card for "COVID-QA-Chunk-64-question-answering-biencoder-data-65_25_10-v2" More Information needed
[ "# Dataset Card for \"COVID-QA-Chunk-64-question-answering-biencoder-data-65_25_10-v2\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"COVID-QA-Chunk-64-question-answering-biencoder-data-65_25_10-v2\"\n\nMore Information needed" ]
[ 6, 40 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"COVID-QA-Chunk-64-question-answering-biencoder-data-65_25_10-v2\"\n\nMore Information needed" ]
cafe930f11366e43efa3e60c6859484644ff3d67
# Dataset Card for "end_sur_DA_tokenized" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
carnival13/end_sur_DA_tokenized
[ "region:us" ]
2023-10-09T02:55:18+00:00
{"dataset_info": {"features": [{"name": "pass_label", "dtype": "int64"}, {"name": "input_ids", "sequence": "int32"}, {"name": "attention_mask", "sequence": "int8"}], "splits": [{"name": "train", "num_bytes": 127709805, "num_examples": 160590}], "download_size": 27943074, "dataset_size": 127709805}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-10-09T02:55:21+00:00
[]
[]
TAGS #region-us
# Dataset Card for "end_sur_DA_tokenized" More Information needed
[ "# Dataset Card for \"end_sur_DA_tokenized\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"end_sur_DA_tokenized\"\n\nMore Information needed" ]
[ 6, 19 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"end_sur_DA_tokenized\"\n\nMore Information needed" ]
bf07ab6aa16990b807f046594a3b3dcc12b11930
# Dataset Card for Evaluation run of elliotthwang/elliott_Llama-2-7b-hf

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/elliotthwang/elliott_Llama-2-7b-hf
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [elliotthwang/elliott_Llama-2-7b-hf](https://huggingface.co/elliotthwang/elliott_Llama-2-7b-hf) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_elliotthwang__elliott_Llama-2-7b-hf",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-26T23:30:10.386120](https://huggingface.co/datasets/open-llm-leaderboard/details_elliotthwang__elliott_Llama-2-7b-hf/blob/main/results_2023-10-26T23-30-10.386120.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and in the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.001153523489932886,
        "em_stderr": 0.00034761798968571027,
        "f1": 0.05575817953020141,
        "f1_stderr": 0.001306153544964195,
        "acc": 0.4026884110741377,
        "acc_stderr": 0.009681922567248534
    },
    "harness|drop|3": {
        "em": 0.001153523489932886,
        "em_stderr": 0.00034761798968571027,
        "f1": 0.05575817953020141,
        "f1_stderr": 0.001306153544964195
    },
    "harness|gsm8k|5": {
        "acc": 0.06899166034874905,
        "acc_stderr": 0.006980995834838602
    },
    "harness|winogrande|5": {
        "acc": 0.7363851617995264,
        "acc_stderr": 0.012382849299658464
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
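As a follow-up to the loading snippet above, the aggregated metrics shown in "Latest results" can also be fetched directly. This sketch assumes the "results" configuration and its "latest" split listed in this card's configs behave like any other config/split pair:

```python
from datasets import load_dataset

# "results" aggregates all runs; the "latest" split points at the newest one.
results = load_dataset(
    "open-llm-leaderboard/details_elliotthwang__elliott_Llama-2-7b-hf",
    "results",
    split="latest",
)
print(results[0])  # aggregated metrics for the most recent evaluation run
```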
open-llm-leaderboard/details_elliotthwang__elliott_Llama-2-7b-hf
[ "region:us" ]
2023-10-09T03:04:43+00:00
{"pretty_name": "Evaluation run of elliotthwang/elliott_Llama-2-7b-hf", "dataset_summary": "Dataset automatically created during the evaluation run of model [elliotthwang/elliott_Llama-2-7b-hf](https://huggingface.co/elliotthwang/elliott_Llama-2-7b-hf) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_elliotthwang__elliott_Llama-2-7b-hf\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-26T23:30:10.386120](https://huggingface.co/datasets/open-llm-leaderboard/details_elliotthwang__elliott_Llama-2-7b-hf/blob/main/results_2023-10-26T23-30-10.386120.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.001153523489932886,\n \"em_stderr\": 0.00034761798968571027,\n \"f1\": 0.05575817953020141,\n \"f1_stderr\": 0.001306153544964195,\n \"acc\": 0.4026884110741377,\n \"acc_stderr\": 0.009681922567248534\n },\n \"harness|drop|3\": {\n \"em\": 0.001153523489932886,\n \"em_stderr\": 0.00034761798968571027,\n \"f1\": 0.05575817953020141,\n \"f1_stderr\": 0.001306153544964195\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.06899166034874905,\n \"acc_stderr\": 0.006980995834838602\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7363851617995264,\n \"acc_stderr\": 0.012382849299658464\n }\n}\n```", "repo_url": "https://huggingface.co/elliotthwang/elliott_Llama-2-7b-hf", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|arc:challenge|25_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_26T23_30_10.386120", "path": ["**/details_harness|drop|3_2023-10-26T23-30-10.386120.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-26T23-30-10.386120.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_26T23_30_10.386120", "path": ["**/details_harness|gsm8k|5_2023-10-26T23-30-10.386120.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-26T23-30-10.386120.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hellaswag|10_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-10-09T04-04-19.372525.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-09T04-04-19.372525.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-10-09T04-04-19.372525.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-management|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-virology|5_2023-10-09T04-04-19.372525.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-09T04-04-19.372525.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-10-09T04-04-19.372525.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_26T23_30_10.386120", "path": ["**/details_harness|winogrande|5_2023-10-26T23-30-10.386120.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-26T23-30-10.386120.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_10_09T04_04_19.372525", "path": ["results_2023-10-09T04-04-19.372525.parquet"]}, {"split": "2023_10_26T23_30_10.386120", "path": ["results_2023-10-26T23-30-10.386120.parquet"]}, {"split": "latest", "path": ["results_2023-10-26T23-30-10.386120.parquet"]}]}]}
2023-10-26T22:30:23+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of elliotthwang/elliott_Llama-2-7b-hf

## Dataset Description

- Homepage: 
- Repository: URL
- Paper: 
- Leaderboard: URL
- Point of Contact: clementine@URL

### Dataset Summary

Dataset automatically created during the evaluation run of model elliotthwang/elliott_Llama-2-7b-hf on the Open LLM Leaderboard.

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following:

## Latest results

These are the latest results from run 2023-10-26T23:30:10.386120 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and in the "latest" split for each eval):

### Supported Tasks and Leaderboards

### Languages

## Dataset Structure

### Data Instances

### Data Fields

### Data Splits

## Dataset Creation

### Curation Rationale

### Source Data

#### Initial Data Collection and Normalization

#### Who are the source language producers?

### Annotations

#### Annotation process

#### Who are the annotators?

### Personal and Sensitive Information

## Considerations for Using the Data

### Social Impact of Dataset

### Discussion of Biases

### Other Known Limitations

## Additional Information

### Dataset Curators

### Licensing Information

### Contributions
[ "# Dataset Card for Evaluation run of elliotthwang/elliott_Llama-2-7b-hf", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model elliotthwang/elliott_Llama-2-7b-hf on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-26T23:30:10.386120(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of elliotthwang/elliott_Llama-2-7b-hf", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model elliotthwang/elliott_Llama-2-7b-hf on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-26T23:30:10.386120(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 26, 31, 174, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of elliotthwang/elliott_Llama-2-7b-hf## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model elliotthwang/elliott_Llama-2-7b-hf on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-26T23:30:10.386120(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]