diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/_mmlu_flan_generative_template_yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/_mmlu_flan_generative_template_yaml
new file mode 100644
index 0000000000000000000000000000000000000000..5b190b8d12e0f3e3fa0f8ca5a0a3a7fe2fd28d40
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/_mmlu_flan_generative_template_yaml
@@ -0,0 +1,30 @@
+group: mmlu_flan_n_shot_generative
+dataset_path: hails/mmlu_no_train # a copy of `cais/mmlu` with no auxiliary_train split
+test_split: test
+fewshot_split: dev
+output_type: generate_until
+doc_to_text: "Q: {{question.strip()}}\n(A) {{choices[0]}} (B) {{choices[1]}} (C) {{choices[2]}} (D) {{choices[3]}}\nA: "
+doc_to_target: "{{['(A)', '(B)', '(C)', '(D)'][answer]}}"
+filter_list:
+  - name: "strict-match"
+    filter:
+      - function: "take_first"
+  - name: "flexible-extract"
+    filter:
+      - function: !function utils.MultiChoiceRegexFilter
+        group_select: 0
+        regex_pattern: "(\\([A-Z]\\))"
+        ignore_case: true
+        ignore_punctuation: true
+      - function: "take_first"
+generation_kwargs:
+  until:
+    - "</s>"
+    - "Q:"
+    - "<|im_end|>"
+metric_list:
+  - metric: exact_match
+    aggregation: mean
+    higher_is_better: true
+metadata:
+  version: 1.0
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_college_biology.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_college_biology.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..3772b0e6411ec48f7ee0b68fe15ba7288cfde811
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_college_biology.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "college_biology"
+"description": "The following are multiple choice questions (with answers) about college\
+  \ biology.\n\n"
+"group": "mmlu_flan_n_shot_generative_stem"
+"include": "_mmlu_flan_generative_template_yaml"
+"task": "mmlu_flan_n_shot_generative_college_biology"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_college_chemistry.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_college_chemistry.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..cedcf0cc2ffc855be657dc7fa2869b6c3a4e9c76
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_college_chemistry.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "college_chemistry"
+"description": "The following are multiple choice questions (with answers) about college\
+  \ chemistry.\n\n"
+"group": "mmlu_flan_n_shot_generative_stem"
+"include": "_mmlu_flan_generative_template_yaml"
+"task": "mmlu_flan_n_shot_generative_college_chemistry"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_college_computer_science.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_college_computer_science.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..a060903a97c31f701fb9a2158df072120d38e46a
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_college_computer_science.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "college_computer_science"
+"description": "The following are multiple choice questions (with answers) about college\
+  \ computer science.\n\n"
+"group": "mmlu_flan_n_shot_generative_stem"
+"include": "_mmlu_flan_generative_template_yaml"
+"task": "mmlu_flan_n_shot_generative_college_computer_science"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_college_physics.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_college_physics.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..3b1e64e640ba8ab8625f02d3636b24770caac001
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_college_physics.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "college_physics"
+"description": "The following are multiple choice questions (with answers) about college\
+  \ physics.\n\n"
+"group": "mmlu_flan_n_shot_generative_stem"
+"include": "_mmlu_flan_generative_template_yaml"
+"task": "mmlu_flan_n_shot_generative_college_physics"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_econometrics.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_econometrics.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..d9b4ebfcdd5d23fc606e50bce368c0b4db545428
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_econometrics.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "econometrics"
+"description": "The following are multiple choice questions (with answers) about econometrics.\n\
+  \n"
+"group": "mmlu_flan_n_shot_generative_social_sciences"
+"include": "_mmlu_flan_generative_template_yaml"
+"task": "mmlu_flan_n_shot_generative_econometrics"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_formal_logic.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_formal_logic.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..1c2ad3a1b6fc4f5f407e13645b77cb4da61d403d
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_formal_logic.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "formal_logic"
+"description": "The following are multiple choice questions (with answers) about formal\
+  \ logic.\n\n"
+"group": "mmlu_flan_n_shot_generative_humanities"
+"include": "_mmlu_flan_generative_template_yaml"
+"task": "mmlu_flan_n_shot_generative_formal_logic"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_high_school_biology.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_high_school_biology.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..0b51f34a90b4dff6465b37d39d5f5dab7b2866c6
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_high_school_biology.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "high_school_biology"
+"description": "The following are multiple choice questions (with answers) about high\
+  \ school biology.\n\n"
+"group": "mmlu_flan_n_shot_generative_stem"
+"include": "_mmlu_flan_generative_template_yaml"
+"task": "mmlu_flan_n_shot_generative_high_school_biology"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_high_school_chemistry.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_high_school_chemistry.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..0066ba7716c6316d911d273cdffd0a21fb019bdf
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_high_school_chemistry.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "high_school_chemistry"
+"description": "The following are multiple choice questions (with answers) about high\
+  \ school chemistry.\n\n"
+"group": "mmlu_flan_n_shot_generative_stem"
+"include": "_mmlu_flan_generative_template_yaml"
+"task": "mmlu_flan_n_shot_generative_high_school_chemistry"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_high_school_macroeconomics.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_high_school_macroeconomics.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..71d822595f4699348f1aca2d47cb0bd9529d5e6f
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_high_school_macroeconomics.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "high_school_macroeconomics"
+"description": "The following are multiple choice questions (with answers) about high\
+  \ school macroeconomics.\n\n"
+"group": "mmlu_flan_n_shot_generative_social_sciences"
+"include": "_mmlu_flan_generative_template_yaml"
+"task": "mmlu_flan_n_shot_generative_high_school_macroeconomics"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_high_school_mathematics.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_high_school_mathematics.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..20d31e12cce9944cc53e914301dcb44984b7674a
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_high_school_mathematics.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "high_school_mathematics"
+"description": "The following are multiple choice questions (with answers) about high\
+  \ school mathematics.\n\n"
+"group": "mmlu_flan_n_shot_generative_stem"
+"include": "_mmlu_flan_generative_template_yaml"
+"task": "mmlu_flan_n_shot_generative_high_school_mathematics"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_high_school_microeconomics.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_high_school_microeconomics.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..5c6d6ef9f6ab6faf79e324652ea22a7bce2f84e7
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_high_school_microeconomics.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "high_school_microeconomics"
+"description": "The following are multiple choice questions (with answers) about high\
+  \ school microeconomics.\n\n"
+"group": "mmlu_flan_n_shot_generative_social_sciences"
+"include": "_mmlu_flan_generative_template_yaml"
+"task": "mmlu_flan_n_shot_generative_high_school_microeconomics"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_high_school_world_history.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_high_school_world_history.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..dfb839c314246b34607c3a68038ce71a64b1ff96
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_high_school_world_history.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "high_school_world_history"
+"description": "The following are multiple choice questions (with answers) about high\
+  \ school world history.\n\n"
+"group": "mmlu_flan_n_shot_generative_humanities"
+"include": "_mmlu_flan_generative_template_yaml"
+"task": "mmlu_flan_n_shot_generative_high_school_world_history"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_human_aging.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_human_aging.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..a857698f7770454d7ad9e7bd402a5a2f2a921af6
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_human_aging.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "human_aging"
+"description": "The following are multiple choice questions (with answers) about human\
+  \ aging.\n\n"
+"group": "mmlu_flan_n_shot_generative_other"
+"include": "_mmlu_flan_generative_template_yaml"
+"task": "mmlu_flan_n_shot_generative_human_aging"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_logical_fallacies.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_logical_fallacies.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..c6363390211d1ca822c3299b2bbd2e7fcb36fff4
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_logical_fallacies.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "logical_fallacies"
+"description": "The following are multiple choice questions (with answers) about logical\
+  \ fallacies.\n\n"
+"group": "mmlu_flan_n_shot_generative_humanities"
+"include": "_mmlu_flan_generative_template_yaml"
+"task": "mmlu_flan_n_shot_generative_logical_fallacies"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_marketing.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_marketing.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..0716dc144e3d01f2991c7126a2d47c53deb52bc9
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_marketing.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "marketing"
+"description": "The following are multiple choice questions (with answers) about marketing.\n\
+  \n"
+"group": "mmlu_flan_n_shot_generative_other"
+"include": "_mmlu_flan_generative_template_yaml"
+"task": "mmlu_flan_n_shot_generative_marketing"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_medical_genetics.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_medical_genetics.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..92115979b24e64d2314808276bd2772247570dd6
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_medical_genetics.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "medical_genetics"
+"description": "The following are multiple choice questions (with answers) about medical\
+  \ genetics.\n\n"
+"group": "mmlu_flan_n_shot_generative_other"
+"include": "_mmlu_flan_generative_template_yaml"
+"task": "mmlu_flan_n_shot_generative_medical_genetics"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_moral_disputes.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_moral_disputes.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..58bf43dfa819e83af629cc17b33ac6b49a5404dd
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_moral_disputes.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "moral_disputes"
+"description": "The following are multiple choice questions (with answers) about moral\
+  \ disputes.\n\n"
+"group": "mmlu_flan_n_shot_generative_humanities"
+"include": "_mmlu_flan_generative_template_yaml"
+"task": "mmlu_flan_n_shot_generative_moral_disputes"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_nutrition.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_nutrition.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..df14da9daf3bae25c95f685d06eba25fe6e740c5
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_nutrition.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "nutrition"
+"description": "The following are multiple choice questions (with answers) about nutrition.\n\
+  \n"
+"group": "mmlu_flan_n_shot_generative_other"
+"include": "_mmlu_flan_generative_template_yaml"
+"task": "mmlu_flan_n_shot_generative_nutrition"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_prehistory.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_prehistory.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..3695e77055f40403a4aa384ddb20084bb5302359
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_prehistory.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "prehistory"
+"description": "The following are multiple choice questions (with answers) about prehistory.\n\
+  \n"
+"group": "mmlu_flan_n_shot_generative_humanities"
+"include": "_mmlu_flan_generative_template_yaml"
+"task": "mmlu_flan_n_shot_generative_prehistory"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_professional_medicine.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_professional_medicine.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..c420d0d3607a30e7edf13bfa2afb36dd3bce364b
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_professional_medicine.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "professional_medicine"
+"description": "The following are multiple choice questions (with answers) about professional\
+  \ medicine.\n\n"
+"group": "mmlu_flan_n_shot_generative_other"
+"include": "_mmlu_flan_generative_template_yaml"
+"task": "mmlu_flan_n_shot_generative_professional_medicine"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_us_foreign_policy.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_us_foreign_policy.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..5c51472532d22ee6706847d028cb711c92356431
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/generative/mmlu_us_foreign_policy.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "us_foreign_policy"
+"description": "The following are multiple choice questions (with answers) about us\
+  \ foreign policy.\n\n"
+"group": "mmlu_flan_n_shot_generative_social_sciences"
+"include": "_mmlu_flan_generative_template_yaml"
+"task": "mmlu_flan_n_shot_generative_us_foreign_policy"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/_mmlu.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/_mmlu.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..43c5f0076a69e39f138040880fde5c107d4535f6
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/_mmlu.yaml
@@ -0,0 +1,6 @@
+group: mmlu_flan_n_shot_loglikelihood
+task:
+  - mmlu_flan_n_shot_loglikelihood_stem
+  - mmlu_flan_n_shot_loglikelihood_other
+  - mmlu_flan_n_shot_loglikelihood_social_sciences
+  - mmlu_flan_n_shot_loglikelihood_humanities
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/_mmlu_flan_loglikelihood_template_yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/_mmlu_flan_loglikelihood_template_yaml
new file mode 100644
index 0000000000000000000000000000000000000000..4bd5e44e45f3461f5a6840eebbf299ad7f1f3ca5
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/_mmlu_flan_loglikelihood_template_yaml
@@ -0,0 +1,16 @@
+dataset_path: hails/mmlu_no_train # a copy of `cais/mmlu` with no auxiliary_train split
+test_split: test
+fewshot_split: dev
+output_type: multiple_choice
+doc_to_text: "Q: {{question.strip()}}\n(A) {{choices[0]}} (B) {{choices[1]}} (C) {{choices[2]}} (D) {{choices[3]}}\nA: "
+doc_to_choice: ["(A)", "(B)", "(C)", "(D)"]
+doc_to_target: answer
+metric_list:
+  - metric: acc
+    aggregation: mean
+    higher_is_better: true
+  - metric: acc_norm
+    aggregation: mean
+    higher_is_better: true
+metadata:
+  version: 0.0
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_abstract_algebra.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_abstract_algebra.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..068898e4c3a03c1b895f0cf63e2279193fabfd86
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_abstract_algebra.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "abstract_algebra"
+"description": "The following are multiple choice questions (with answers) about abstract\
+  \ algebra.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_stem"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_abstract_algebra"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_anatomy.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_anatomy.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..db5fa24e5c51723fd77f010298319d8b3f915ce2
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_anatomy.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "anatomy"
+"description": "The following are multiple choice questions (with answers) about anatomy.\n\
+  \n"
+"group": "mmlu_flan_n_shot_loglikelihood_stem"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_anatomy"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_astronomy.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_astronomy.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..5f71dbcfa10f829b6514d652933b2cc94eb77bd2
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_astronomy.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "astronomy"
+"description": "The following are multiple choice questions (with answers) about astronomy.\n\
+  \n"
+"group": "mmlu_flan_n_shot_loglikelihood_stem"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_astronomy"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_business_ethics.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_business_ethics.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..54dc204d2431face1dcd41235b8bb3679dff3496
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_business_ethics.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "business_ethics"
+"description": "The following are multiple choice questions (with answers) about business\
+  \ ethics.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_other"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_business_ethics"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_clinical_knowledge.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_clinical_knowledge.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..121b3c22efeebfcb3fe06839f8fec1d43a6a9831
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_clinical_knowledge.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "clinical_knowledge"
+"description": "The following are multiple choice questions (with answers) about clinical\
+  \ knowledge.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_other"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_clinical_knowledge"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_college_biology.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_college_biology.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..cadb6fb75d42815018b3f7fe241d25e6ee8021ac
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_college_biology.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "college_biology"
+"description": "The following are multiple choice questions (with answers) about college\
+  \ biology.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_stem"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_college_biology"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_college_computer_science.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_college_computer_science.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..e4bdbdd67206016451228bbfa7b318279f9b43dc
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_college_computer_science.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "college_computer_science"
+"description": "The following are multiple choice questions (with answers) about college\
+  \ computer science.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_stem"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_college_computer_science"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_college_mathematics.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_college_mathematics.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..17d0cb541babb66924e32c17b92a13679116f851
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_college_mathematics.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "college_mathematics"
+"description": "The following are multiple choice questions (with answers) about college\
+  \ mathematics.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_stem"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_college_mathematics"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_college_medicine.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_college_medicine.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..c45a6c9c138cbe906aa7a9207f472b1b92d8522d
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_college_medicine.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "college_medicine"
+"description": "The following are multiple choice questions (with answers) about college\
+  \ medicine.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_other"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_college_medicine"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_computer_security.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_computer_security.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..5b0a75ff1e2981e4741a2dde05f0663cd10aea1d
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_computer_security.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "computer_security"
+"description": "The following are multiple choice questions (with answers) about computer\
+  \ security.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_stem"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_computer_security"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_conceptual_physics.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_conceptual_physics.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..94b7eaf62fecff26bf515294f5d36c2887304baa
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_conceptual_physics.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "conceptual_physics"
+"description": "The following are multiple choice questions (with answers) about conceptual\
+  \ physics.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_stem"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_conceptual_physics"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_econometrics.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_econometrics.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..146d4847d816e546c0ec817cd2e236f453d6bae1
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_econometrics.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "econometrics"
+"description": "The following are multiple choice questions (with answers) about econometrics.\n\
+  \n"
+"group": "mmlu_flan_n_shot_loglikelihood_social_sciences"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_econometrics"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_electrical_engineering.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_electrical_engineering.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..61cb27e22855e169d30f7fdcd71a18f42afc1ce3
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_electrical_engineering.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "electrical_engineering"
+"description": "The following are multiple choice questions (with answers) about electrical\
+  \ engineering.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_stem"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_electrical_engineering"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_elementary_mathematics.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_elementary_mathematics.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..39e10f856c003cc0c382a088278c2a44ee0ad92a
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_elementary_mathematics.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "elementary_mathematics"
+"description": "The following are multiple choice questions (with answers) about elementary\
+  \ mathematics.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_stem"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_elementary_mathematics"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_formal_logic.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_formal_logic.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..7fb8aa923735d58bc22d32f93f556cfe54cd66af
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_formal_logic.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "formal_logic"
+"description": "The following are multiple choice questions (with answers) about formal\
+  \ logic.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_humanities"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_formal_logic"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_global_facts.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_global_facts.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..5ffc9069ac1e7ee42eada962ee7c7a5146b05be6
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_global_facts.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "global_facts"
+"description": "The following are multiple choice questions (with answers) about global\
+  \ facts.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_other"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_global_facts"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_biology.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_biology.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..328b47f8bc141d6f465f38ebd634c1dece5d0269
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_biology.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "high_school_biology"
+"description": "The following are multiple choice questions (with answers) about high\
+  \ school biology.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_stem"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_high_school_biology"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_chemistry.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_chemistry.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..350583752e02786d1f21eccb8118ca9c8f0e1af8
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_chemistry.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "high_school_chemistry"
+"description": "The following are multiple choice questions (with answers) about high\
+  \ school chemistry.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_stem"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_high_school_chemistry"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_computer_science.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_computer_science.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..cd2e1285a9b53895c766e23ea33a859d8fa81218
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_computer_science.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "high_school_computer_science"
+"description": "The following are multiple choice questions (with answers) about high\
+  \ school computer science.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_stem"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_high_school_computer_science"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_european_history.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_european_history.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..62f9465f9253dc46df99a6fa3495c9987de4ce85
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_european_history.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "high_school_european_history"
+"description": "The following are multiple choice questions (with answers) about high\
+  \ school european history.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_humanities"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_high_school_european_history"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_geography.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_geography.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..c2e8d83f450497a1c2abf1dbc520ce44e31ff199
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_geography.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "high_school_geography"
+"description": "The following are multiple choice questions (with answers) about high\
+  \ school geography.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_social_sciences"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_high_school_geography"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_government_and_politics.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_government_and_politics.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..9b72fb19b7148583e017f51d6df5e66bb45eeb53
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_government_and_politics.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "high_school_government_and_politics"
+"description": "The following are multiple choice questions (with answers) about high\
+  \ school government and politics.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_social_sciences"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_high_school_government_and_politics"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_microeconomics.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_microeconomics.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..d72fc2ac23689e308b41670d5acefb5222509616
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_microeconomics.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "high_school_microeconomics"
+"description": "The following are multiple choice questions (with answers) about high\
+  \ school microeconomics.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_social_sciences"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_high_school_microeconomics"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_physics.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_physics.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..c09b2c1d6255ec21a677d1a3dbf10b71302fda29
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_physics.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "high_school_physics"
+"description": "The following are multiple choice questions (with answers) about high\
+  \ school physics.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_stem"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_high_school_physics"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_statistics.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_statistics.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..bc0c9d1a04c4290e0b283e6c2a53be7212f4f276
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_statistics.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "high_school_statistics"
+"description": "The following are multiple choice questions (with answers) about high\
+  \ school statistics.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_stem"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_high_school_statistics"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_us_history.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_us_history.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..0524e7675be12322aeb336f95ce805b7e322a66b
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_us_history.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "high_school_us_history"
+"description": "The following are multiple choice questions (with answers) about high\
+  \ school us history.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_humanities"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_high_school_us_history"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_world_history.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_world_history.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..8664736255b9f7d55945cb40fd285a49b6a626f3
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_high_school_world_history.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "high_school_world_history"
+"description": "The following are multiple choice questions (with answers) about high\
+  \ school world history.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_humanities"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_high_school_world_history"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_human_aging.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_human_aging.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..9d0a0179e675716b33feb140c268b4926ac6b46d
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_human_aging.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "human_aging"
+"description": "The following are multiple choice questions (with answers) about human\
+  \ aging.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_other"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_human_aging"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_human_sexuality.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_human_sexuality.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..96979867c1ceba5b5a4bf952b70dd37d2d36b233
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_human_sexuality.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "human_sexuality"
+"description": "The following are multiple choice questions (with answers) about human\
+  \ sexuality.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_social_sciences"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_human_sexuality"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_international_law.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_international_law.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..3d6eb6b28fadb9ed0b28487880eda0b6fd9bf4c3
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_international_law.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "international_law"
+"description": "The following are multiple choice questions (with answers) about international\
+  \ law.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_humanities"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_international_law"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_logical_fallacies.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_logical_fallacies.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..b735d0738561226e6922687d7d103a5da4446055
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_logical_fallacies.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "logical_fallacies"
+"description": "The following are multiple choice questions (with answers) about logical\
+  \ fallacies.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_humanities"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_logical_fallacies"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_machine_learning.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_machine_learning.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..fccc7058b5b5598dbe7efebf9f04c484bc071388
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_machine_learning.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "machine_learning"
+"description": "The following are multiple choice questions (with answers) about machine\
+  \ learning.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_stem"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_machine_learning"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_management.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_management.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..a40da661e36759dd9206b7c8eda6217b693ecf22
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_management.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "management"
+"description": "The following are multiple choice questions (with answers) about management.\n\
+  \n"
+"group": "mmlu_flan_n_shot_loglikelihood_other"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_management"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_marketing.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_marketing.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..3537a86b933669b1f2cce6362184eb6bf61988e7
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_marketing.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "marketing"
+"description": "The following are multiple choice questions (with answers) about marketing.\n\
+  \n"
+"group": "mmlu_flan_n_shot_loglikelihood_other"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_marketing"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_medical_genetics.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_medical_genetics.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..49247525eadd9ca9e8368d8f2d0606a97d837e0d
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_medical_genetics.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "medical_genetics"
+"description": "The following are multiple choice questions (with answers) about medical\
+  \ genetics.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_other"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_medical_genetics"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_miscellaneous.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_miscellaneous.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..c6aa9bafad16dfc8abcee5a83b701f2e7934a037
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_miscellaneous.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "miscellaneous"
+"description": "The following are multiple choice questions (with answers) about miscellaneous.\n\
+  \n"
+"group": "mmlu_flan_n_shot_loglikelihood_other"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_miscellaneous"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_moral_disputes.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_moral_disputes.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..4ff46f425aed8740eb3fe349fd5afebb6b079a06
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_moral_disputes.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "moral_disputes"
+"description": "The following are multiple choice questions (with answers) about moral\
+  \ disputes.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_humanities"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_moral_disputes"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_moral_scenarios.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_moral_scenarios.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..cdcc3b01048ca4451f9b554d69b0fe51e549d76b
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_moral_scenarios.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "moral_scenarios"
+"description": "The following are multiple choice questions (with answers) about moral\
+  \ scenarios.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_humanities"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_moral_scenarios"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_nutrition.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_nutrition.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..5773ca1f70b7e6bab5e5c328d62f483ee6ae74be
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_nutrition.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "nutrition"
+"description": "The following are multiple choice questions (with answers) about nutrition.\n\
+  \n"
+"group": "mmlu_flan_n_shot_loglikelihood_other"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_nutrition"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_philosophy.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_philosophy.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..944b44a14477a42d7662075521a6edeafa778685
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_philosophy.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "philosophy"
+"description": "The following are multiple choice questions (with answers) about philosophy.\n\
+  \n"
+"group": "mmlu_flan_n_shot_loglikelihood_humanities"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_philosophy"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_prehistory.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_prehistory.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..184a9584176eb47d55ad9fd323e6409a853ee315
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_prehistory.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "prehistory"
+"description": "The following are multiple choice questions (with answers) about prehistory.\n\
+  \n"
+"group": "mmlu_flan_n_shot_loglikelihood_humanities"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_prehistory"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_professional_medicine.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_professional_medicine.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..68c8dc46da23e0fdff8f8eb57c467849f15e567a
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_professional_medicine.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "professional_medicine"
+"description": "The following are multiple choice questions (with answers) about professional\
+  \ medicine.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_other"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_professional_medicine"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_security_studies.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_security_studies.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..03b27c90cd85d4288145e05ad0f1c219ebda34ed
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_security_studies.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "security_studies"
+"description": "The following are multiple choice questions (with answers) about security\
+  \ studies.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_social_sciences"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_security_studies"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_sociology.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_sociology.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..630d16925febdb8ef943ea5d7b5faa649f487a9d
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_sociology.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "sociology"
+"description": "The following are multiple choice questions (with answers) about sociology.\n\
+  \n"
+"group": "mmlu_flan_n_shot_loglikelihood_social_sciences"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_sociology"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_us_foreign_policy.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_us_foreign_policy.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..2274d0670dbe7cd43ab747d95638c000fe3b877f
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_us_foreign_policy.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "us_foreign_policy"
+"description": "The following are multiple choice questions (with answers) about us\
+  \ foreign policy.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_social_sciences"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_us_foreign_policy"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_virology.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_virology.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..8bd36105dfe6381f3f8d289502a716326bdba650
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_virology.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "virology"
+"description": "The following are multiple choice questions (with answers) about virology.\n\
+  \n"
+"group": "mmlu_flan_n_shot_loglikelihood_other"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_virology"
diff --git a/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_world_religions.yaml b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_world_religions.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..e59c28498f3103940076b7846ba68f169c10484e
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/mmlu/flan_n_shot/loglikelihood/mmlu_world_religions.yaml
@@ -0,0 +1,6 @@
+"dataset_name": "world_religions"
+"description": "The following are multiple choice questions (with answers) about world\
+  \ religions.\n\n"
+"group": "mmlu_flan_n_shot_loglikelihood_humanities"
+"include": "_mmlu_flan_loglikelihood_template_yaml"
+"task": "mmlu_flan_n_shot_loglikelihood_world_religions"
diff --git a/lm-evaluation/lm_eval/tasks/pile/pile_enron.yaml b/lm-evaluation/lm_eval/tasks/pile/pile_enron.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..e344fcfa215c5896b1d23aef1c4d45f5f0f91448
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/pile/pile_enron.yaml
@@ -0,0 +1,3 @@
+include: pile_arxiv.yaml
+task: pile_enron
+dataset_name: pile_enron
diff --git a/lm-evaluation/lm_eval/tasks/pile/pile_europarl.yaml b/lm-evaluation/lm_eval/tasks/pile/pile_europarl.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..aad5464be3f1153e8b98568dca003a859e89a34e
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/pile/pile_europarl.yaml
@@ -0,0 +1,3 @@
+include: pile_arxiv.yaml
+task: pile_europarl
+dataset_name: pile_europarl
diff --git a/lm-evaluation/lm_eval/tasks/pile/pile_wikipedia.yaml b/lm-evaluation/lm_eval/tasks/pile/pile_wikipedia.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..11236e9e8e94d346a7402420ce9dd5e2978333fc
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/pile/pile_wikipedia.yaml
@@ -0,0 +1,3 @@
+include: pile_arxiv.yaml
+task: pile_wikipedia
+dataset_name: pile_wikipedia
diff --git a/lm-evaluation/lm_eval/tasks/pile/pile_youtubesubtitles.yaml b/lm-evaluation/lm_eval/tasks/pile/pile_youtubesubtitles.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..aaf7376c85dada7ead9b2e9c85648b496cfcf66c
--- /dev/null
+++ b/lm-evaluation/lm_eval/tasks/pile/pile_youtubesubtitles.yaml
@@ -0,0 +1,3 @@
+include: pile_arxiv.yaml
+task: pile_youtubesubtitles
+dataset_name: pile_youtubesubtitles