Commit 5d488e4
Parent(s): 727de5d
Update train.jsonl

train.jsonl CHANGED (+19 -20)
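Each line of train.jsonl is a standalone JSON record describing one evaluation task: its name, the source dataset (hf_repo, hf_subset), the available and evaluated splits, the generation size and stop sequences, the metrics to compute, the suite(s) it belongs to, and the prompt_function used to format examples. As a quick illustration (not part of the commit; it assumes a local copy of the file under this name), the Python sketch below reads the file line by line and prints a one-line summary per task:

import json

# Illustrative sketch: summarize the task records in train.jsonl.
# Assumes the file sits in the current working directory.
with open("train.jsonl", encoding="utf-8") as fh:
    for line_no, raw in enumerate(fh, start=1):
        raw = raw.strip()
        if not raw:
            continue
        task = json.loads(raw)  # every line is one standalone JSON object
        print(f"{line_no:>3}  {task['name']:<50}  repo={task['hf_repo']}  "
              f"subset={task['hf_subset'] or '-'}  suites={','.join(task['suite'])}")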
@@ -1,4 +1,3 @@
-{"name": "", "hf_repo": "", "hf_subset": "", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": [], "suite": ["helm"], "prompt_function": ""}
{"name": "babi_qa", "hf_repo": "facebook/babi_qa", "hf_subset": "en-valid-qa1", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "babi_qa"}
{"name": "bbq", "hf_repo": "lighteval/bbq_helm", "hf_subset": "all", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "generative_acc", "bbq_ambiguous_bias", "bbq_unambiguous_bias"], "suite": ["helm"], "prompt_function": "bbq"}
{"name": "bbq_Age", "hf_repo": "lighteval/bbq_helm", "hf_subset": "Age", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "generative_acc", "bbq_ambiguous_bias", "bbq_unambiguous_bias"], "suite": ["helm"], "prompt_function": "bbq"}
@@ -103,9 +102,9 @@
{"name": "apps", "hf_repo": "codeparrot/apps", "hf_subset": "all", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 600, "stop_sequence": ["'''", "---", "\"\"\"", "\n\n\n"], "metric": ["code_eval_apps", "bias", "toxicity"], "suite": ["helm", "code_scenario"], "prompt_function": "apps"}
{"name": "hellaswag", "hf_repo": "hellaswag", "hf_subset": "default", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "hellaswag_helm"}
{"name": "openbookqa", "hf_repo": "openbookqa", "hf_subset": "main", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "commonsense_scenario"], "prompt_function": "openbookqa"}
-{"name": "commonsenseqa", "hf_repo": "commonsense_qa", "hf_subset": "",
-{"name": "piqa", "hf_repo": "piqa", "hf_subset": "",
-{"name": "siqa", "hf_repo": "social_i_qa", "hf_subset": "",
+{"name": "commonsenseqa", "hf_repo": "commonsense_qa", "hf_subset": "default","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "commonsense_scenario"], "prompt_function": "commonsense_qa"}
+{"name": "piqa", "hf_repo": "piqa", "hf_subset": "default","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "commonsense_scenario"], "prompt_function": "piqa_helm"}
+{"name": "siqa", "hf_repo": "social_i_qa", "hf_subset": "default","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "commonsense_scenario"], "prompt_function": "siqa"}
{"name": "copyright_pilot", "hf_repo": "lighteval/copyright_helm", "hf_subset": "pilot", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["copyright"], "suite": ["helm", "copyright_scenario"], "prompt_function": "copyright"}
{"name": "copyright_n_books_1000-extractions_per_book_1-prefix_length_5", "hf_repo": "lighteval/copyright_helm", "hf_subset": "n_books_1000-extractions_per_book_1-prefix_length_5", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["copyright"], "suite": ["helm", "copyright_scenario"], "prompt_function": "copyright"}
{"name": "copyright_n_books_1000-extractions_per_book_1-prefix_length_25", "hf_repo": "lighteval/copyright_helm", "hf_subset": "n_books_1000-extractions_per_book_1-prefix_length_25", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["copyright"], "suite": ["helm", "copyright_scenario"], "prompt_function": "copyright"}
@@ -123,13 +122,13 @@
{"name": "copyright_prompt_num_line_1-min_lines_20.json", "hf_repo": "lighteval/copyright_helm", "hf_subset": "prompt_num_line_1-min_lines_20.json", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["copyright"], "suite": ["helm", "copyright_scenario"], "prompt_function": "copyright"}
{"name": "copyright_prompt_num_line_5-min_lines_20.json", "hf_repo": "lighteval/copyright_helm", "hf_subset": "prompt_num_line_5-min_lines_20.json", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["copyright"], "suite": ["helm", "copyright_scenario"], "prompt_function": "copyright"}
{"name": "copyright_prompt_num_line_10-min_lines_20.json", "hf_repo": "lighteval/copyright_helm", "hf_subset": "prompt_num_line_10-min_lines_20.json", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["copyright"], "suite": ["helm", "copyright_scenario"], "prompt_function": "copyright"}
-{"name": "covid_dialogue", "hf_repo": "lighteval/covid_dialogue", "hf_subset": "",
-{"name": "empathetic_dialogues", "hf_repo": "lighteval/empathetic_dialogues", "hf_subset": "",
+{"name": "covid_dialogue", "hf_repo": "lighteval/covid_dialogue", "hf_subset": "default","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 128, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score", "rouge_l", "bleu_1", "bleu_4", "bias", "toxicity"], "suite": ["helm"], "prompt_function": "covid_dialogue"}
+{"name": "empathetic_dialogues", "hf_repo": "lighteval/empathetic_dialogues", "hf_subset": "default","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 50, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "empathetic_dialogue"}
{"name": "disinformation_wedging", "hf_repo": "lighteval/disinformation", "hf_subset": "wedging", "hf_avail_splits": ["validation"], "evaluation_splits": ["validation"], "generation_size": 90, "stop_sequence": ["\nTweet", "\nReason"], "metric": ["disinformation", "bias", "toxicity"], "suite": ["helm"], "prompt_function": "disinformation_wedging"}
{"name": "disinformation_climate", "hf_repo": "lighteval/disinformation", "hf_subset": "reiteration_climate", "hf_avail_splits": ["train", "validation"], "evaluation_splits": ["validation"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["disinformation", "bias", "toxicity"], "suite": ["helm"], "prompt_function": "disinformation_reiteration"}
{"name": "disinformation_covid", "hf_repo": "lighteval/disinformation", "hf_subset": "reiteration_covid", "hf_avail_splits": ["train", "validation"], "evaluation_splits": ["validation"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["disinformation", "bias", "toxicity"], "suite": ["helm"], "prompt_function": "disinformation_reiteration"}
-{"name": "entity_data_imputation_Buy", "hf_repo": "lighteval/Buy", "hf_subset": "",
-{"name": "entity_data_imputation_Restaurant", "hf_repo": "lighteval/Restaurant", "hf_subset": "",
+{"name": "entity_data_imputation_Buy", "hf_repo": "lighteval/Buy", "hf_subset": "default","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "entity_data_imputation"}
+{"name": "entity_data_imputation_Restaurant", "hf_repo": "lighteval/Restaurant", "hf_subset": "default","hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "entity_data_imputation"}
{"name": "entity_matching_Beer", "hf_repo": "lighteval/EntityMatching", "hf_subset": "Beer", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "entity_matching"}
{"name": "entity_matching_iTunes_Amazon", "hf_repo": "lighteval/EntityMatching", "hf_subset": "iTunes_Amazon", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "entity_matching"}
{"name": "entity_matching_Fodors_Zagats", "hf_repo": "lighteval/EntityMatching", "hf_subset": "Fodors_Zagats", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "entity_matching"}
@@ -144,8 +143,8 @@
{"name": "entity_matching_Dirty_DBLP_GoogleScholar", "hf_repo": "lighteval/EntityMatching", "hf_subset": "Dirty_DBLP_GoogleScholar", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "entity_matching"}
{"name": "entity_matching_Dirty_Walmart_Amazon", "hf_repo": "lighteval/EntityMatching", "hf_subset": "Dirty_Walmart_Amazon", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "entity_matching"}
{"name": "gsm8k", "hf_repo": "gsm8k", "hf_subset": "main", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 400, "stop_sequence": ["\n\n"], "metric": ["exact_match_indicator", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "gsm8k_helm"}
-{"name": "imdb", "hf_repo": "lighteval/IMDB_helm", "hf_subset": "",
-{"name": "imdb_contrastset", "hf_repo": "lighteval/IMDB_helm", "hf_subset": "",
+{"name": "imdb", "hf_repo": "lighteval/IMDB_helm", "hf_subset": "default","hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "imdb"}
+{"name": "imdb_contrastset", "hf_repo": "lighteval/IMDB_helm", "hf_subset": "default","hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "imdb_contrastset"}
{"name": "interactive_qa_mmlu_college_chemistry", "hf_repo": "hendrycks_test", "hf_subset": "college_chemistry", "hf_avail_splits": ["dev", "test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "interactive_qa_mmlu_scenario"], "prompt_function": "mmlu_qa_college_chemistry"}
{"name": "interactive_qa_mmlu_global_facts", "hf_repo": "hendrycks_test", "hf_subset": "global_facts", "hf_avail_splits": ["dev", "test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "interactive_qa_mmlu_scenario"], "prompt_function": "mmlu_qa_global_facts"}
{"name": "interactive_qa_mmlu_miscellaneous", "hf_repo": "hendrycks_test", "hf_subset": "miscellaneous", "hf_avail_splits": ["dev", "test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm", "interactive_qa_mmlu_scenario"], "prompt_function": "mmlu_qa_miscellaneous"}
@@ -154,7 +153,7 @@
{"name": "legal_summarization_billsum", "hf_repo": "lighteval/legal_summarization", "hf_subset": "billsum", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 1024, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "rouge_1", "rouge_2", "rouge_l", "faithfulness", "extractiveness", "bert_score"], "suite": ["helm"], "prompt_function": "legal_summarization"}
{"name": "legal_summarization_eurlexsum", "hf_repo": "lighteval/legal_summarization", "hf_subset": "eurlexsum", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 2048, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "rouge_1", "rouge_2", "rouge_l", "faithfulness", "extractiveness", "bert_score"], "suite": ["helm"], "prompt_function": "legal_summarization"}
{"name": "legal_summarization_multilexsum", "hf_repo": "lighteval/legal_summarization", "hf_subset": "multilexsum", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 256, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "rouge_1", "rouge_2", "rouge_l", "faithfulness", "extractiveness", "bert_score"], "suite": ["helm"], "prompt_function": "multilexsum"}
-{"name": "legalsupport", "hf_repo": "lighteval/LegalSupport", "hf_subset": "",
+{"name": "legalsupport", "hf_repo": "lighteval/LegalSupport", "hf_subset": "default","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "legal_support"}
{"name": "lexglue_ecthr_a", "hf_repo": "lighteval/lexglue", "hf_subset": "ecthr_a", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 20, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score"], "suite": ["helm", "lex_glue_scenario"], "prompt_function": "lex_glue_ecthr_a"}
{"name": "lexglue_ecthr_b", "hf_repo": "lighteval/lexglue", "hf_subset": "ecthr_b", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 20, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score"], "suite": ["helm", "lex_glue_scenario"], "prompt_function": "lex_glue_ecthr_b"}
{"name": "lexglue_scotus", "hf_repo": "lighteval/lexglue", "hf_subset": "scotus", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score"], "suite": ["helm", "lex_glue_scenario"], "prompt_function": "lex_glue_scotus"}
@@ -192,20 +191,20 @@
{"name": "math_number_theory", "hf_repo": "lighteval/math", "hf_subset": "number_theory", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 20, "stop_sequence": ["$"], "metric": ["toxicity", "bias", "math"], "suite": ["helm", "math_scenario"], "prompt_function": "math_helm"}
{"name": "math_prealgebra", "hf_repo": "lighteval/math", "hf_subset": "prealgebra", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 20, "stop_sequence": ["$"], "metric": ["toxicity", "bias", "math"], "suite": ["helm", "math_scenario"], "prompt_function": "math_helm"}
{"name": "math_precalculus", "hf_repo": "lighteval/math", "hf_subset": "precalculus", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 20, "stop_sequence": ["$"], "metric": ["toxicity", "bias", "math"], "suite": ["helm", "math_scenario"], "prompt_function": "math_helm"}
-{"name": "me_q_sum", "hf_repo": "lighteval/me_q_sum", "hf_subset": "",
+{"name": "me_q_sum", "hf_repo": "lighteval/me_q_sum", "hf_subset": "default","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 128, "stop_sequence":["###"], "metric": ["exact_match", "quasi_exact_match", "f1_score", "rouge_l", "bleu_1", "bleu_4", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "me_q_sum"}
{"name": "med_dialog_healthcaremagic", "hf_repo": "lighteval/med_dialog", "hf_subset": "healthcaremagic", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 128, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score", "rouge_l", "bleu_1", "bleu_4", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "med_dialog"}
{"name": "med_dialog_icliniq", "hf_repo": "lighteval/med_dialog", "hf_subset": "icliniq", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 128, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score", "rouge_l", "bleu_1", "bleu_4", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "med_dialog"}
-{"name": "med_mcqa", "hf_repo": "lighteval/med_mcqa", "hf_subset": "",
-{"name": "med_paragraph_simplification", "hf_repo": "lighteval/med_paragraph_simplification", "hf_subset": "",
+{"name": "med_mcqa", "hf_repo": "lighteval/med_mcqa", "hf_subset": "default","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "med_mcqa"}
+{"name": "med_paragraph_simplification", "hf_repo": "lighteval/med_paragraph_simplification", "hf_subset": "default","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 512, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score", "rouge_l", "bleu_1", "bleu_4", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "med_paragraph_simplification"}
{"name": "med_qa", "hf_repo": "bigbio/med_qa", "hf_subset": "med_qa_en_source", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "med_qa"}
-{"name": "mmlu", "hf_repo": "cais/mmlu", "hf_subset": "",
+{"name": "mmlu", "hf_repo": "cais/mmlu", "hf_subset": "default","hf_avail_splits": ["auxiliary_train", "test", "validation", "dev"], "evaluation_splits": ["validation", "test"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "mmlu"}
{"name": "ms_marco_regular", "hf_repo": "lighteval/ms_marco", "hf_subset": "regular", "hf_avail_splits": ["train", "validation"], "evaluation_splits": ["validation"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["trec"], "suite": ["helm"], "prompt_function": "ms_marco"}
{"name": "ms_marco_trec", "hf_repo": "lighteval/ms_marco", "hf_subset": "trec", "hf_avail_splits": ["train", "validation"], "evaluation_splits": ["validation"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["trec"], "suite": ["helm"], "prompt_function": "ms_marco"}
-{"name": "narrativeqa", "hf_repo": "narrativeqa", "hf_subset": "",
+{"name": "narrativeqa", "hf_repo": "narrativeqa", "hf_subset": "default","hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score", "rouge_l", "bleu_1", "bleu_4", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "narrativeqa"}
{"name": "natural_qa_openbook_wiki", "hf_repo": "lighteval/natural_questions", "hf_subset": "default", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 300, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "natural_qa_openbook_wiki"}
{"name": "natural_qa_openbook_longans", "hf_repo": "lighteval/natural_questions", "hf_subset": "default", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 300, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "natural_qa_openbook_longans"}
{"name": "natural_qa_closedbook", "hf_repo": "lighteval/natural_questions", "hf_subset": "default", "hf_avail_splits": ["train", "validation"], "evaluation_splits": ["validation"], "generation_size": 300, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "natural_qa_closedbook"}
-{"name": "newsqa", "hf_repo": "lighteval/newsqa", "hf_subset": "",
+{"name": "newsqa", "hf_repo": "lighteval/newsqa", "hf_subset": "default","hf_avail_splits": ["train", "validation"], "evaluation_splits": ["validation"], "generation_size": 50, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score", "bias", "toxicity"], "suite": ["helm"], "prompt_function": "newsqa"}
{"name": "numeracy_linear_example", "hf_repo": "lighteval/numeracy", "hf_subset": "linear_example", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 20, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "absolute_value_difference"], "suite": ["helm"], "prompt_function": "numeracy"}
{"name": "numeracy_linear_standard", "hf_repo": "lighteval/numeracy", "hf_subset": "linear_standard", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 20, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "absolute_value_difference"], "suite": ["helm"], "prompt_function": "numeracy"}
{"name": "numeracy_parabola_example", "hf_repo": "lighteval/numeracy", "hf_subset": "parabola_example", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 20, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "absolute_value_difference"], "suite": ["helm"], "prompt_function": "numeracy"}
@@ -215,7 +214,7 @@
{"name": "numeracy_paraboloid_example", "hf_repo": "lighteval/numeracy", "hf_subset": "paraboloid_example", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 20, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "absolute_value_difference"], "suite": ["helm"], "prompt_function": "numeracy"}
{"name": "numeracy_paraboloid_standard", "hf_repo": "lighteval/numeracy", "hf_subset": "paraboloid_standard", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 20, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "absolute_value_difference"], "suite": ["helm"], "prompt_function": "numeracy"}
{"name": "pubmed_qa", "hf_repo": "pubmed_qa", "hf_subset": "pqa_labeled", "hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 1, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "pubmed_qa_helm"}
-{"name": "quac", "hf_repo": "quac", "hf_subset": "",
+{"name": "quac", "hf_repo": "quac", "hf_subset": "default","hf_avail_splits": ["train", "validation"], "evaluation_splits": ["validation"], "generation_size": 100, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "f1_score", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "quac"}
{"name": "raft_ade_corpus_v2", "hf_repo": "ought/raft", "hf_subset": "ade_corpus_v2", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 30, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "raft"}
{"name": "raft_banking_77", "hf_repo": "ought/raft", "hf_subset": "banking_77", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 30, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "raft"}
{"name": "raft_neurips_impact_statement_risks", "hf_repo": "ought/raft", "hf_subset": "neurips_impact_statement_risks", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 30, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "raft"}
@@ -227,7 +226,7 @@
{"name": "raft_terms_of_service", "hf_repo": "ought/raft", "hf_subset": "terms_of_service", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 30, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "raft"}
{"name": "raft_tweet_eval_hate", "hf_repo": "ought/raft", "hf_subset": "tweet_eval_hate", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 30, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "raft"}
{"name": "raft_twitter_complaints", "hf_repo": "ought/raft", "hf_subset": "twitter_complaints", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 30, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "raft"}
-{"name": "real_toxicity_prompts", "hf_repo": "allenai/real-toxicity-prompts", "hf_subset": "",
+{"name": "real_toxicity_prompts", "hf_repo": "allenai/real-toxicity-prompts", "hf_subset": "default","hf_avail_splits": ["train"], "evaluation_splits": ["train"], "generation_size": 20, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "prediction_perplexity"], "suite": ["helm"], "prompt_function": "real_toxicity_prompts"}
{"name": "summarization_xsum", "hf_repo": "lighteval/summarization", "hf_subset": "xsum", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 64, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "rouge_1", "rouge_2", "rouge_l", "faithfulness", "extractiveness", "bert_score"], "suite": ["helm"], "prompt_function": "xsum"}
{"name": "summarization_xsum-sampled", "hf_repo": "lighteval/summarization", "hf_subset": "xsum-sampled", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 64, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "rouge_1", "rouge_2", "rouge_l", "faithfulness", "extractiveness", "bert_score"], "suite": ["helm"], "prompt_function": "xsum"}
{"name": "summarization_cnn-dm", "hf_repo": "lighteval/summarization", "hf_subset": "cnn-dm", "hf_avail_splits": ["train", "test", "validation"], "evaluation_splits": ["validation", "test"], "generation_size": 128, "stop_sequence": ["\n"], "metric": ["bias", "toxicity", "rouge_1", "rouge_2", "rouge_l", "faithfulness", "extractiveness", "bert_score"], "suite": ["helm"], "prompt_function": "cnn_dm"}
@@ -258,7 +257,7 @@
{"name": "the_pile_ubuntu-irc", "hf_repo": "pile_helm", "hf_subset": "ubuntu-irc", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "the_pile"}
{"name": "the_pile_bookcorpus", "hf_repo": "pile_helm", "hf_subset": "bookcorpus", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "the_pile"}
{"name": "the_pile_philpapers", "hf_repo": "pile_helm", "hf_subset": "philpapers", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "the_pile"}
-{"name": "truthfulqa", "hf_repo": "lighteval/truthfulqa_helm", "hf_subset": "",
+{"name": "truthfulqa", "hf_repo": "lighteval/truthfulqa_helm", "hf_subset": "default","hf_avail_splits": ["train", "validation"], "evaluation_splits": ["validation"], "generation_size": 5, "stop_sequence": ["\n"], "metric": ["loglikelihood_acc", "exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match"], "suite": ["helm"], "prompt_function": "truthful_qa_helm"}
{"name": "twitterAAE_aa", "hf_repo": "lighteval/twitterAAE", "hf_subset": "aa", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "twitter_aae"}
{"name": "twitterAAE_white", "hf_repo": "lighteval/twitterAAE", "hf_subset": "white", "hf_avail_splits": ["test"], "evaluation_splits": ["test"], "generation_size": -1, "stop_sequence": ["\n"], "metric": ["perplexity"], "suite": ["helm"], "prompt_function": "twitter_aae"}
{"name": "wikifact_genre", "hf_repo": "lighteval/wikifact", "hf_subset": "genre", "hf_avail_splits": ["train", "test"], "evaluation_splits": ["test"], "generation_size": 8, "stop_sequence": ["\n"], "metric": ["exact_match", "quasi_exact_match", "prefix_exact_match", "quasi_prefix_exact_match", "toxicity", "bias"], "suite": ["helm"], "prompt_function": "wikifact"}
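The twenty removed lines are the blank placeholder record from line 1 and nineteen records that appear to have been cut off right after their "hf_subset" field, which leaves those lines unparseable as JSON; the replacement lines restore complete objects. A small check along the lines of the sketch below (illustrative only; the required-key set is inferred from the records shown in this diff) flags such truncated lines, so it should fail on the parent revision 727de5d and pass on this commit:

import json
import sys

# Keys present in every record shown in this diff, treated here as the expected schema.
REQUIRED_KEYS = {
    "name", "hf_repo", "hf_subset", "hf_avail_splits", "evaluation_splits",
    "generation_size", "stop_sequence", "metric", "suite", "prompt_function",
}

def validate_jsonl(path: str) -> bool:
    """Return True when every non-empty line parses as JSON and carries the expected keys."""
    ok = True
    with open(path, encoding="utf-8") as fh:
        for line_no, raw in enumerate(fh, start=1):
            raw = raw.strip()
            if not raw:
                continue
            try:
                record = json.loads(raw)
            except json.JSONDecodeError as err:
                print(f"line {line_no}: not valid JSON ({err})")
                ok = False
                continue
            missing = REQUIRED_KEYS - record.keys()
            if missing:
                print(f"line {line_no}: missing keys {sorted(missing)}")
                ok = False
    return ok

if __name__ == "__main__":
    sys.exit(0 if validate_jsonl("train.jsonl") else 1)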