Add files using upload-large-folder tool
This view is limited to 50 files because it contains too many changes.
- lm-evaluation/lm_eval/tasks/mmlu/default/_default_template_yaml +15 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_abstract_algebra.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_anatomy.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_astronomy.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_business_ethics.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_clinical_knowledge.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_college_biology.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_college_chemistry.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_college_computer_science.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_college_mathematics.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_college_medicine.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_college_physics.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_conceptual_physics.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_electrical_engineering.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_formal_logic.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_global_facts.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_high_school_biology.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_high_school_chemistry.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_high_school_computer_science.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_high_school_european_history.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_high_school_geography.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_high_school_government_and_politics.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_high_school_mathematics.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_high_school_microeconomics.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_high_school_physics.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_high_school_psychology.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_human_aging.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_human_sexuality.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_international_law.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_logical_fallacies.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_management.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_marketing.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_medical_genetics.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_miscellaneous.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_moral_disputes.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_moral_scenarios.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_nutrition.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_philosophy.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_prehistory.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_professional_accounting.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_professional_psychology.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_public_relations.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_security_studies.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_us_foreign_policy.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_world_religions.yaml +8 -0
- lm-evaluation/lm_eval/tasks/mmlu/flan_cot_fewshot/_mmlu.yaml +6 -0
- lm-evaluation/lm_eval/tasks/mmlu/flan_cot_fewshot/_mmlu_flan_cot_fewshot_template_yaml +26 -0
- lm-evaluation/lm_eval/tasks/mmlu/flan_cot_fewshot/mmlu_business_ethics.yaml +56 -0
- lm-evaluation/lm_eval/tasks/mmlu/flan_cot_fewshot/mmlu_college_biology.yaml +61 -0
- lm-evaluation/lm_eval/tasks/mmlu/flan_cot_fewshot/mmlu_college_mathematics.yaml +50 -0
lm-evaluation/lm_eval/tasks/mmlu/default/_default_template_yaml
ADDED
@@ -0,0 +1,15 @@
+dataset_path: hails/mmlu_no_train # a copy of `cais/mmlu` with no auxiliary_train split
+test_split: test
+fewshot_split: dev
+fewshot_config:
+  sampler: first_n
+output_type: multiple_choice
+doc_to_text: "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:"
+doc_to_choice: ["A", "B", "C", "D"]
+doc_to_target: answer
+metric_list:
+  - metric: acc
+    aggregation: mean
+    higher_is_better: true
+metadata:
+  version: 0.0
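Below is a minimal sketch of how the `doc_to_text` template above renders a single document. The sample `doc` is made up for illustration; real documents come from the `hails/mmlu_no_train` dataset, and the harness performs this rendering internally (jinja2 is assumed to be installed).

```python
# Illustrative rendering of the default MMLU prompt template.
# The sample document below is hypothetical.
from jinja2 import Template

doc = {
    "question": "Find the order of the factor group Z_6/<3>. ",
    "choices": ["2", "3", "6", "12"],
    "answer": 1,  # index into doc_to_choice, i.e. "B"
}

doc_to_text = (
    "{{question.strip()}}\n"
    "A. {{choices[0]}}\n"
    "B. {{choices[1]}}\n"
    "C. {{choices[2]}}\n"
    "D. {{choices[3]}}\n"
    "Answer:"
)

print(Template(doc_to_text).render(**doc))
# Find the order of the factor group Z_6/<3>.
# A. 2
# B. 3
# C. 6
# D. 12
# Answer:
```

Under `multiple_choice` output, the model is scored by comparing the log-likelihoods of the four `doc_to_choice` continuations ("A" through "D"), with `doc_to_target` giving the index of the correct one.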
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_abstract_algebra.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "abstract_algebra"
+"description": "The following are multiple choice questions (with answers) about abstract\
+  \ algebra.\n\n"
+"group": "mmlu_stem"
+"group_alias": "stem"
+"include": "_default_template_yaml"
+"task": "mmlu_abstract_algebra"
+"task_alias": "abstract_algebra"
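Each per-subject file like the one above carries only subject-specific keys and pulls everything else in via `include`. The sketch below approximates the effect with a plain key-level merge; the harness resolves `include` internally, so treat the merge semantics here as an assumption for illustration only.

```python
# Hypothetical illustration of how an include'd template and a per-task
# YAML combine: keys in the task file override keys in the template.
import yaml

with open("_default_template_yaml") as f:
    base = yaml.safe_load(f)   # shared MMLU defaults
with open("mmlu_abstract_algebra.yaml") as f:
    task = yaml.safe_load(f)   # subject-specific overrides

task.pop("include", None)      # the include directive itself is consumed
effective = {**base, **task}   # task-level keys win on conflict

print(effective["task"])         # mmlu_abstract_algebra
print(effective["output_type"])  # multiple_choice (from the template)
```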
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_anatomy.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "anatomy"
+"description": "The following are multiple choice questions (with answers) about anatomy.\n\
+  \n"
+"group": "mmlu_stem"
+"group_alias": "stem"
+"include": "_default_template_yaml"
+"task": "mmlu_anatomy"
+"task_alias": "anatomy"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_astronomy.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "astronomy"
+"description": "The following are multiple choice questions (with answers) about astronomy.\n\
+  \n"
+"group": "mmlu_stem"
+"group_alias": "stem"
+"include": "_default_template_yaml"
+"task": "mmlu_astronomy"
+"task_alias": "astronomy"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_business_ethics.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "business_ethics"
+"description": "The following are multiple choice questions (with answers) about business\
+  \ ethics.\n\n"
+"group": "mmlu_other"
+"group_alias": "other"
+"include": "_default_template_yaml"
+"task": "mmlu_business_ethics"
+"task_alias": "business_ethics"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_clinical_knowledge.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "clinical_knowledge"
+"description": "The following are multiple choice questions (with answers) about clinical\
+  \ knowledge.\n\n"
+"group": "mmlu_other"
+"group_alias": "other"
+"include": "_default_template_yaml"
+"task": "mmlu_clinical_knowledge"
+"task_alias": "clinical_knowledge"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_college_biology.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "college_biology"
+"description": "The following are multiple choice questions (with answers) about college\
+  \ biology.\n\n"
+"group": "mmlu_stem"
+"group_alias": "stem"
+"include": "_default_template_yaml"
+"task": "mmlu_college_biology"
+"task_alias": "college_biology"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_college_chemistry.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "college_chemistry"
+"description": "The following are multiple choice questions (with answers) about college\
+  \ chemistry.\n\n"
+"group": "mmlu_stem"
+"group_alias": "stem"
+"include": "_default_template_yaml"
+"task": "mmlu_college_chemistry"
+"task_alias": "college_chemistry"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_college_computer_science.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "college_computer_science"
+"description": "The following are multiple choice questions (with answers) about college\
+  \ computer science.\n\n"
+"group": "mmlu_stem"
+"group_alias": "stem"
+"include": "_default_template_yaml"
+"task": "mmlu_college_computer_science"
+"task_alias": "college_computer_science"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_college_mathematics.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "college_mathematics"
+"description": "The following are multiple choice questions (with answers) about college\
+  \ mathematics.\n\n"
+"group": "mmlu_stem"
+"group_alias": "stem"
+"include": "_default_template_yaml"
+"task": "mmlu_college_mathematics"
+"task_alias": "college_mathematics"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_college_medicine.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "college_medicine"
+"description": "The following are multiple choice questions (with answers) about college\
+  \ medicine.\n\n"
+"group": "mmlu_other"
+"group_alias": "other"
+"include": "_default_template_yaml"
+"task": "mmlu_college_medicine"
+"task_alias": "college_medicine"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_college_physics.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "college_physics"
+"description": "The following are multiple choice questions (with answers) about college\
+  \ physics.\n\n"
+"group": "mmlu_stem"
+"group_alias": "stem"
+"include": "_default_template_yaml"
+"task": "mmlu_college_physics"
+"task_alias": "college_physics"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_conceptual_physics.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "conceptual_physics"
+"description": "The following are multiple choice questions (with answers) about conceptual\
+  \ physics.\n\n"
+"group": "mmlu_stem"
+"group_alias": "stem"
+"include": "_default_template_yaml"
+"task": "mmlu_conceptual_physics"
+"task_alias": "conceptual_physics"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_electrical_engineering.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "electrical_engineering"
+"description": "The following are multiple choice questions (with answers) about electrical\
+  \ engineering.\n\n"
+"group": "mmlu_stem"
+"group_alias": "stem"
+"include": "_default_template_yaml"
+"task": "mmlu_electrical_engineering"
+"task_alias": "electrical_engineering"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_formal_logic.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "formal_logic"
+"description": "The following are multiple choice questions (with answers) about formal\
+  \ logic.\n\n"
+"group": "mmlu_humanities"
+"group_alias": "humanities"
+"include": "_default_template_yaml"
+"task": "mmlu_formal_logic"
+"task_alias": "formal_logic"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_global_facts.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "global_facts"
+"description": "The following are multiple choice questions (with answers) about global\
+  \ facts.\n\n"
+"group": "mmlu_other"
+"group_alias": "other"
+"include": "_default_template_yaml"
+"task": "mmlu_global_facts"
+"task_alias": "global_facts"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_high_school_biology.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "high_school_biology"
+"description": "The following are multiple choice questions (with answers) about high\
+  \ school biology.\n\n"
+"group": "mmlu_stem"
+"group_alias": "stem"
+"include": "_default_template_yaml"
+"task": "mmlu_high_school_biology"
+"task_alias": "high_school_biology"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_high_school_chemistry.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "high_school_chemistry"
+"description": "The following are multiple choice questions (with answers) about high\
+  \ school chemistry.\n\n"
+"group": "mmlu_stem"
+"group_alias": "stem"
+"include": "_default_template_yaml"
+"task": "mmlu_high_school_chemistry"
+"task_alias": "high_school_chemistry"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_high_school_computer_science.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "high_school_computer_science"
+"description": "The following are multiple choice questions (with answers) about high\
+  \ school computer science.\n\n"
+"group": "mmlu_stem"
+"group_alias": "stem"
+"include": "_default_template_yaml"
+"task": "mmlu_high_school_computer_science"
+"task_alias": "high_school_computer_science"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_high_school_european_history.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "high_school_european_history"
+"description": "The following are multiple choice questions (with answers) about high\
+  \ school european history.\n\n"
+"group": "mmlu_humanities"
+"group_alias": "humanities"
+"include": "_default_template_yaml"
+"task": "mmlu_high_school_european_history"
+"task_alias": "high_school_european_history"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_high_school_geography.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "high_school_geography"
+"description": "The following are multiple choice questions (with answers) about high\
+  \ school geography.\n\n"
+"group": "mmlu_social_sciences"
+"group_alias": "social_sciences"
+"include": "_default_template_yaml"
+"task": "mmlu_high_school_geography"
+"task_alias": "high_school_geography"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_high_school_government_and_politics.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "high_school_government_and_politics"
+"description": "The following are multiple choice questions (with answers) about high\
+  \ school government and politics.\n\n"
+"group": "mmlu_social_sciences"
+"group_alias": "social_sciences"
+"include": "_default_template_yaml"
+"task": "mmlu_high_school_government_and_politics"
+"task_alias": "high_school_government_and_politics"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_high_school_mathematics.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "high_school_mathematics"
+"description": "The following are multiple choice questions (with answers) about high\
+  \ school mathematics.\n\n"
+"group": "mmlu_stem"
+"group_alias": "stem"
+"include": "_default_template_yaml"
+"task": "mmlu_high_school_mathematics"
+"task_alias": "high_school_mathematics"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_high_school_microeconomics.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "high_school_microeconomics"
+"description": "The following are multiple choice questions (with answers) about high\
+  \ school microeconomics.\n\n"
+"group": "mmlu_social_sciences"
+"group_alias": "social_sciences"
+"include": "_default_template_yaml"
+"task": "mmlu_high_school_microeconomics"
+"task_alias": "high_school_microeconomics"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_high_school_physics.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "high_school_physics"
+"description": "The following are multiple choice questions (with answers) about high\
+  \ school physics.\n\n"
+"group": "mmlu_stem"
+"group_alias": "stem"
+"include": "_default_template_yaml"
+"task": "mmlu_high_school_physics"
+"task_alias": "high_school_physics"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_high_school_psychology.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "high_school_psychology"
+"description": "The following are multiple choice questions (with answers) about high\
+  \ school psychology.\n\n"
+"group": "mmlu_social_sciences"
+"group_alias": "social_sciences"
+"include": "_default_template_yaml"
+"task": "mmlu_high_school_psychology"
+"task_alias": "high_school_psychology"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_human_aging.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "human_aging"
+"description": "The following are multiple choice questions (with answers) about human\
+  \ aging.\n\n"
+"group": "mmlu_other"
+"group_alias": "other"
+"include": "_default_template_yaml"
+"task": "mmlu_human_aging"
+"task_alias": "human_aging"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_human_sexuality.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "human_sexuality"
+"description": "The following are multiple choice questions (with answers) about human\
+  \ sexuality.\n\n"
+"group": "mmlu_social_sciences"
+"group_alias": "social_sciences"
+"include": "_default_template_yaml"
+"task": "mmlu_human_sexuality"
+"task_alias": "human_sexuality"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_international_law.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "international_law"
+"description": "The following are multiple choice questions (with answers) about international\
+  \ law.\n\n"
+"group": "mmlu_humanities"
+"group_alias": "humanities"
+"include": "_default_template_yaml"
+"task": "mmlu_international_law"
+"task_alias": "international_law"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_logical_fallacies.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "logical_fallacies"
+"description": "The following are multiple choice questions (with answers) about logical\
+  \ fallacies.\n\n"
+"group": "mmlu_humanities"
+"group_alias": "humanities"
+"include": "_default_template_yaml"
+"task": "mmlu_logical_fallacies"
+"task_alias": "logical_fallacies"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_management.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "management"
+"description": "The following are multiple choice questions (with answers) about management.\n\
+  \n"
+"group": "mmlu_other"
+"group_alias": "other"
+"include": "_default_template_yaml"
+"task": "mmlu_management"
+"task_alias": "management"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_marketing.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "marketing"
+"description": "The following are multiple choice questions (with answers) about marketing.\n\
+  \n"
+"group": "mmlu_other"
+"group_alias": "other"
+"include": "_default_template_yaml"
+"task": "mmlu_marketing"
+"task_alias": "marketing"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_medical_genetics.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "medical_genetics"
+"description": "The following are multiple choice questions (with answers) about medical\
+  \ genetics.\n\n"
+"group": "mmlu_other"
+"group_alias": "other"
+"include": "_default_template_yaml"
+"task": "mmlu_medical_genetics"
+"task_alias": "medical_genetics"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_miscellaneous.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "miscellaneous"
+"description": "The following are multiple choice questions (with answers) about miscellaneous.\n\
+  \n"
+"group": "mmlu_other"
+"group_alias": "other"
+"include": "_default_template_yaml"
+"task": "mmlu_miscellaneous"
+"task_alias": "miscellaneous"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_moral_disputes.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "moral_disputes"
+"description": "The following are multiple choice questions (with answers) about moral\
+  \ disputes.\n\n"
+"group": "mmlu_humanities"
+"group_alias": "humanities"
+"include": "_default_template_yaml"
+"task": "mmlu_moral_disputes"
+"task_alias": "moral_disputes"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_moral_scenarios.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "moral_scenarios"
+"description": "The following are multiple choice questions (with answers) about moral\
+  \ scenarios.\n\n"
+"group": "mmlu_humanities"
+"group_alias": "humanities"
+"include": "_default_template_yaml"
+"task": "mmlu_moral_scenarios"
+"task_alias": "moral_scenarios"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_nutrition.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "nutrition"
+"description": "The following are multiple choice questions (with answers) about nutrition.\n\
+  \n"
+"group": "mmlu_other"
+"group_alias": "other"
+"include": "_default_template_yaml"
+"task": "mmlu_nutrition"
+"task_alias": "nutrition"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_philosophy.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "philosophy"
+"description": "The following are multiple choice questions (with answers) about philosophy.\n\
+  \n"
+"group": "mmlu_humanities"
+"group_alias": "humanities"
+"include": "_default_template_yaml"
+"task": "mmlu_philosophy"
+"task_alias": "philosophy"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_prehistory.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "prehistory"
+"description": "The following are multiple choice questions (with answers) about prehistory.\n\
+  \n"
+"group": "mmlu_humanities"
+"group_alias": "humanities"
+"include": "_default_template_yaml"
+"task": "mmlu_prehistory"
+"task_alias": "prehistory"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_professional_accounting.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "professional_accounting"
+"description": "The following are multiple choice questions (with answers) about professional\
+  \ accounting.\n\n"
+"group": "mmlu_other"
+"group_alias": "other"
+"include": "_default_template_yaml"
+"task": "mmlu_professional_accounting"
+"task_alias": "professional_accounting"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_professional_psychology.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "professional_psychology"
+"description": "The following are multiple choice questions (with answers) about professional\
+  \ psychology.\n\n"
+"group": "mmlu_social_sciences"
+"group_alias": "social_sciences"
+"include": "_default_template_yaml"
+"task": "mmlu_professional_psychology"
+"task_alias": "professional_psychology"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_public_relations.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "public_relations"
+"description": "The following are multiple choice questions (with answers) about public\
+  \ relations.\n\n"
+"group": "mmlu_social_sciences"
+"group_alias": "social_sciences"
+"include": "_default_template_yaml"
+"task": "mmlu_public_relations"
+"task_alias": "public_relations"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_security_studies.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "security_studies"
+"description": "The following are multiple choice questions (with answers) about security\
+  \ studies.\n\n"
+"group": "mmlu_social_sciences"
+"group_alias": "social_sciences"
+"include": "_default_template_yaml"
+"task": "mmlu_security_studies"
+"task_alias": "security_studies"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_us_foreign_policy.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "us_foreign_policy"
+"description": "The following are multiple choice questions (with answers) about us\
+  \ foreign policy.\n\n"
+"group": "mmlu_social_sciences"
+"group_alias": "social_sciences"
+"include": "_default_template_yaml"
+"task": "mmlu_us_foreign_policy"
+"task_alias": "us_foreign_policy"
lm-evaluation/lm_eval/tasks/mmlu/default/mmlu_world_religions.yaml
ADDED
@@ -0,0 +1,8 @@
+"dataset_name": "world_religions"
+"description": "The following are multiple choice questions (with answers) about world\
+  \ religions.\n\n"
+"group": "mmlu_humanities"
+"group_alias": "humanities"
+"include": "_default_template_yaml"
+"task": "mmlu_world_religions"
+"task_alias": "world_religions"
lm-evaluation/lm_eval/tasks/mmlu/flan_cot_fewshot/_mmlu.yaml
ADDED
@@ -0,0 +1,6 @@
+group: mmlu_flan_cot_fewshot
+task:
+  - mmlu_flan_cot_fewshot_stem
+  - mmlu_flan_cot_fewshot_other
+  - mmlu_flan_cot_fewshot_social_sciences
+  - mmlu_flan_cot_fewshot_humanities
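With this group file in place, the whole benchmark (or any single subtask) can be requested by name. A hedged usage sketch via the harness's Python API follows; the model identifier and batch size are placeholders, and `simple_evaluate` reflects recent lm-evaluation-harness releases (the exact entry point and arguments can differ between versions).

```python
# Placeholder model; any HF causal LM identifier works in principle.
import lm_eval

results = lm_eval.simple_evaluate(
    model="hf",
    model_args="pretrained=EleutherAI/pythia-160m",
    tasks=["mmlu_flan_cot_fewshot"],  # the group defined in _mmlu.yaml above
    batch_size=8,
)
print(results["results"])
```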
lm-evaluation/lm_eval/tasks/mmlu/flan_cot_fewshot/_mmlu_flan_cot_fewshot_template_yaml
ADDED
@@ -0,0 +1,26 @@
+dataset_path: hails/mmlu_no_train # a copy of `cais/mmlu` with no auxiliary_train split
+validation_split: validation
+fewshot_split: dev
+output_type: generate_until
+doc_to_text: "Q: {{question.strip()}}\n(A) {{choices[0]}} (B) {{choices[1]}} (C) {{choices[2]}} (D) {{choices[3]}}\nA: Let's think step by step."
+doc_to_target: "{{['(A)', '(B)', '(C)', '(D)'][answer]}}"
+filter_list:
+  - name: "get-answer"
+    filter:
+      - function: "regex"
+        regex_pattern: "(?<=The answer is )(.*)(?=.)"
+      - function: "take_first"
+generation_kwargs:
+  until:
+    - "</s>"
+  do_sample: false
+  temperature: 0.0
+num_fewshot: 0
+metric_list:
+  - metric: exact_match
+    aggregation: mean
+    higher_is_better: true
+    ignore_case: true
+    ignore_punctuation: true
+metadata:
+  version: 0.0
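The `get-answer` filter above extracts the final choice from a chain-of-thought completion before `exact_match` is computed (`take_first` then keeps the first match). A quick check of the regex on a made-up completion:

```python
# Reproduces the "get-answer" regex from regex_pattern above on sample text.
import re

pattern = r"(?<=The answer is )(.*)(?=.)"
completion = (
    "A: Let's think step by step. Centromeres hold sister chromatids "
    "together and anchor spindle fibers. The answer is (B)."
)

match = re.search(pattern, completion)
print(match.group(1))  # -> (B)
```

The trailing `(?=.)` lookahead requires one character after the greedy capture, which drops the final period, so the extracted "(B)" lines up with the `doc_to_target` strings like "(B)".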
lm-evaluation/lm_eval/tasks/mmlu/flan_cot_fewshot/mmlu_business_ethics.yaml
ADDED
@@ -0,0 +1,56 @@
+"dataset_name": "business_ethics"
+"description": "The following are multiple choice questions (with answers) about business\
+  \ ethics.\n\nQ: In contrast to _______, _______ aim to reward favourable behaviour\
+  \ by companies. The success of such campaigns have been heightened through the use\
+  \ of ___________, which allow campaigns to facilitate the company in achieving _________\
+  \ .\n(A) Buycotts, Boycotts, Blockchain technology, Charitable donations (B) Buycotts,\
+  \ Boycotts, Digital technology, Increased Sales (C) Boycotts, Buyalls, Blockchain\
+  \ technology, Charitable donations (D) Boycotts, Buycotts, Digital technology, Increased\
+  \ Sales\nA: Let's think step by step. We refer to Wikipedia articles on business\
+  \ ethics for help. The sentence that best uses the possible options above is “In\
+  \ contrast to *boycotts*, *buycotts* aim to reward favourable behavior by companies.\
+  \ The success of such campaigns have been heightened through the use of *digital\
+  \ technology*, which allow campaigns to facilitate the company in achieving *increased\
+  \ sales*.” The answer is (D).\n\nQ: _______ is the direct attempt to formally or\
+  \ informally manage ethical issues or problems, through specific policies, practices\
+  \ and programmes.\n(A) Corporate social responsibility (B) Business ethics management\
+  \ (C) Sustainability (D) Environmental management\nA: Let's think step by step.\
+  \ We refer to Wikipedia articles on business ethics for help. The direct attempt\
+  \ manage ethical issues through specific policies, practices, and programs is business\
+  \ ethics management. The answer is (B).\n\nQ: Three contrasting tactics that CSO's\
+  \ can engage in to meet their aims are ________ which typically involves research\
+  \ and communication, ________, which may involve physically attacking a company's\
+  \ operations or ________, often involving some form of _______.\n(A) Non-violent\
+  \ direct action, Violent direct action, Indirect action, Boycott (B) Indirect action,\
+  \ Instrumental action, Non-violent direct action, Information campaign (C) Indirect\
+  \ action, Violent direct action, Non-violent direct-action Boycott (D) Non-violent\
+  \ direct action, Instrumental action, Indirect action, Information campaign\nA:\
+  \ Let's think step by step. We refer to Wikipedia articles on business ethics for\
+  \ help. The sentence that best uses the possible options above is “Three contrasting\
+  \ tactics that CSO's can engage in to meet their aims are *indirect action*, which\
+  \ typically involves research and communication, *violent direct action*, which\
+  \ may involve physically attacking a company's operations or *non-violent direct\
+  \ action*, often involving some form of *boycott*.” The answer is (C).\n\nQ: To\
+  \ ensure the independence of the non-executive board members, there are a number\
+  \ of steps which can be taken, which include non-executives being drawn from _______\
+  \ the company, being appointed for a _________ time period as well as being appointed\
+  \ _________.\n(A) Outside, Limited, Independently (B) Inside, Limited, Intermittently\
+  \ (C) Outside, Unlimited, Intermittently (D) Inside, Unlimited, Independently\n\
+  A: Let's think step by step. We refer to Wikipedia articles on business ethics for\
+  \ help. The sentence that best uses the possible options above is “To ensure the\
+  \ independence of the non-executive board members, there are a number of steps which\
+  \ can be taken, which include non-executives being draw from *outside* the company,\
+  \ being appointed for a *limited* time period as well as being imported *independently*.\
+  \ The answer is (A).\n\nQ: Beyond the business case for engaging in CSR there are\
+  \ a number of moral arguments relating to: negative _______, the _______that corporations\
+  \ possess and the ________ of business and society.\n(A) Externalities, Power, Independence\
+  \ (B) Publicity, Insubstantial resources, Mutual dependence (C) Publicity, Power,\
+  \ Independence (D) Externalities, Power, Mutual dependence\nA: Let's think step\
+  \ by step. We refer to Wikipedia articles on business ethics for help. The sentence\
+  \ that best uses the possible options above is “Beyond the business case for engaging\
+  \ the CSR there are a number of moral arguments relating to: negative *externalities*,\
+  \ the *power* that corporations possess and the *mutual independence* of business\
+  \ and society. The answer is (D).\n\n"
+"group": "mmlu_flan_cot_fewshot_other"
+"include": "_mmlu_flan_cot_fewshot_template_yaml"
+"task": "mmlu_flan_cot_fewshot_business_ethics"
lm-evaluation/lm_eval/tasks/mmlu/flan_cot_fewshot/mmlu_college_biology.yaml
ADDED
@@ -0,0 +1,61 @@
+"dataset_name": "college_biology"
+"description": "The following are multiple choice questions (with answers) about college\
+  \ biology.\n\nQ: Which of the following represents an accurate statement concerning\
+  \ arthropods?\n(A) They possess an exoskeleton composed primarily of peptidoglycan.\
+  \ (B) They possess an open circulatory system with a dorsal heart. (C) They are\
+  \ members of a biologically unsuccessful phylum incapable of exploiting diverse\
+  \ habitats and nutrition sources. (D) They lack paired, jointed appendages.\nA:\
+  \ Let's think step by step. Peptidoglycan is known to comprise the plasma membrane\
+  \ of most bacteria, rather than the exoskeleton of arthropods, which is made of\
+  \ chitin, which rules out (A). The answer (C) is false because arthropods are a\
+  \ highly successful phylum. Likewise, arthropods have paired, jointed appendages,\
+  \ which rules out (D). The only remaining option is (B), as arthropods have an open\
+  \ circulatory system with a dorsal tubular heart. The answer is (B).\n\nQ: In a\
+  \ given population, 1 out of every 400 people has a cancer caused by a completely\
+  \ recessive allele, b. Assuming the population is in Hardy-Weinberg equilibrium,\
+  \ which of the following is the expected proportion of individuals who carry the\
+  \ b allele but are not expected to develop the cancer?\n(A) 1/400 (B) 19/400 (C)\
+  \ 20/400 (D) 38/400\nA: Let's think step by step. According to the Hardy Weinberg\
+  \ Law, $p^2 + 2 p q + q^2 = 1$, and $p + q = 1$ where $p$ is the frequency of the\
+  \ dominant allele, $q$ is the frequency of the recessive allele, and $p^2$, $q^2$,\
+  \ and $2pq$ are the frequencies of dominant homozygous, recessive homozygous, and\
+  \ heterozygous individuals, respectively. The frequency of the recessive allele\
+  \ (q) is $\\sqrt{\\frac{1}{400}} = 0.05$. We have $p = 1 - q = 0.95$. The frequency\
+  \ of heterozygous individuals is $2pq = 2 \\cdot 0.05 \\cdot 0.95 = 0.095$. The\
+  \ number of heterozygous individuals is equal to the frequency of heterozygous individuals\
+  \ times the size of the population, or $0.095 * 400 = 38$. So we end up with 38/400.\
+  \ The answer is (D).\n\nQ: According to the pressure-flow model of movement of phloem\
+  \ contents, photosynthate movement from source to sink is driven by\n(A) an ATP-dependent\
+  \ pressure-flow pump (B) a water-pressure potential gradient (C) transpiration (D)\
+  \ apoplastic diffusion\nA: Let's think step by step. It is a gradient in water pressure\
+  \ that induces the movement of phloem content, which refers to answer (B). The mechanism\
+  \ of movement does not rely on metabolism, which rules out (A). Transpiration refers\
+  \ to the exhalation of water vapor through plant stomata, and is also not related,\
+  \ which rules out (C). While the apoplastic pathway is one of two main pathways\
+  \ for water transport in plants, it is not central to the pressure flow model, which\
+  \ rules out (D). The answer is (B).\n\nQ: Which of the following contain DNA sequences\
+  \ required for the segregation of chromosomes in mitosis and meiosis?\n(A) Telomeres\
+  \ (B) Centromeres (C) Nucleosomes (D) Spliceosomes\nA: Let's think step by step.\
+  \ The genetic material in Telomeres is not used, which rules out (A). Nucleosomes\
+  \ are the repeating subunit that comprises chromatin packed in a cell nucleus, and\
+  \ do not specifically refer to DNA sequences necessary for segregating chromosomes\
+  \ in cell division, which rules out (C). A spliceosome is a large ribonucleoprotein\
+  \ that removes introns from transcribed pre-mRNA rather than governing chromosome\
+  \ segregation. Centromeres are directly responsible for segregating chromosomes\
+  \ in cell division. The answer is (B).\n\nQ: The presence of homologous structures\
+  \ in two different organisms, such as the humerus in the front limb of a human and\
+  \ a bird, indicates that\n(A) the human and bird are polyphyletic species (B) a\
+  \ human's and bird's evolution is convergent (C) the human and bird belong to a\
+  \ clade (D) the human and bird developed by analogy\nA: Let's think step by step.\
+  \ Polyphyletic species are organisms that are grouped due to having similar characteristics\
+  \ but which do not have a common ancestor. This is not the case for humans and birds,\
+  \ which rules out (A). Convergent evolution refers to the indepdendent development\
+  \ of similar features in different species at different periods, which is also not\
+  \ the case for humans and birds, which rules out (B). Analogy refers to the superficial\
+  \ resemblance of structures that have different origins, which is not the case for\
+  \ the human and bird forearms, which rules out (D). Humans and birds do belong to\
+  \ the same clade - a group of organisms composed of a common ancestor. The answer\
+  \ is (C).\n\n"
+"group": "mmlu_flan_cot_fewshot_stem"
+"include": "_mmlu_flan_cot_fewshot_template_yaml"
+"task": "mmlu_flan_cot_fewshot_college_biology"
lm-evaluation/lm_eval/tasks/mmlu/flan_cot_fewshot/mmlu_college_mathematics.yaml
ADDED
@@ -0,0 +1,50 @@
+"dataset_name": "college_mathematics"
+"description": "The following are multiple choice questions (with answers) about college\
+  \ mathematics.\n\nQ: Let V be the set of all real polynomials p(x). Let transformations\
+  \ T, S be defined on V by T:p(x) -> xp(x) and S:p(x) -> p'(x) = d/dx p(x), and interpret\
+  \ (ST)(p(x)) as S(T(p(x))). Which of the following is true?\n(A) ST = 0 (B) ST =\
+  \ T (C) ST = TS (D) ST - TS is the identity map of V onto itself.\nA: Let's think\
+  \ step by step. For a given polynomial $p$ we have\n\\[ST(p) = (xp(x))’ = p(x) +\
+  \ xp’(x)\\]\nand\n\\[TS(p) = xp’(x).\\]\nHence \\[ST(p) - TS(p) = p(x) + xp’(x)\
+  \ - xp’(x).\\] The answer is (D).\n\nQ: Suppose that f(1 + x) = f(x) for all real\
+  \ x. If f is a polynomial and f(5) = 11, then f(15/2)\n(A) -11 (B) 0 (C) 11 (D)\
+  \ 33/2\nA: Let's think step by step. The only polynomial so that $f(1 + x) = f(x)$\
+  \ is a constant polynomial. Hence $f(5) = 11 = f(15/2)$. The answer is (C).\n\n\
+  Q: Let A be a real 2x2 matrix. Which of the following statements must be true?\n\
+  I. All of the entries of A^2 are nonnegative.\nII. The determinant of A^2 is nonnegative.\n\
+  III. If A has two distinct eigenvalues, then A^2 has two distinct eigenvalues.\n\
+  (A) I only (B) II only (C) III only (D) II and III only\nA: Let's think step by\
+  \ step. We have \\[ det(A^2) = (det(A))^2 \\geq 0,\\] hence II holds.\nIII is false:\
+  \ as a counterexample take a diagonal matrix with -1 and 1 on the diagonal. Then\
+  \ $A^2$ is the identity matrix. The answer is (B).\n\nQ: Let A be the set of all\
+  \ ordered pairs of integers (m, n) such that 7m + 12n = 22. What is the greatest\
+  \ negative number in the set B = {m + n : (m, n) \\in A}?\n(A) -5 (B) -4 (C) -3\
+  \ (D) -2\nA: Let's think step by step. We have 12n = 22 - 7m and one of the solutions\
+  \ is $m = -2$, $n = 3$. Then $m + n = 1$, hence we need to look for smaller $m$\
+  \ in order to make $m + n$ negative. The next solution is $m = -14$ and $n = 10$.\
+  \ For smaller $m$ we have $m + n$ smaller than $-4$. The answer is (B).\n\nQ: A\
+  \ tank initially contains a salt solution of 3 grams of salt dissolved in 100 liters\
+  \ of water. A salt solution containing 0.02 grams of salt per liter of water is\
+  \ sprayed into the tank at a rate of 4 liters per minute. The sprayed solution is\
+  \ continually mixed with the salt solution in the tank, and the mixture flows out\
+  \ of the tank at a rate of 4 liters per minute. If the mixing is instantaneous,\
+  \ how many grams of salt are in the tank after 100 minutes have elapsed?\n(A) 2\
+  \ (B) 2 - e^-2 (C) 2 + e^-2 (D) 2 + e^-4\nA: Let's think step by step. For all $t\
+  \ \\in \\mathbb{R}$, let $s(t)$ denote the number grams of salt in the tank at the\
+  \ $t$ minute mark. Then $s(0) = 3$.\nWe use $s$ and $s(t)$ interchangeably. We also\
+  \ use $s^{\\prime}$ and $s^{\\prime}(t)$ interchangeably. The solution sprayed into\
+  \ the tank adds $(0.02) 4=2 / 25$ grams of salt per minute. There are always 100\
+  \ liters of liquid in the tank, containing $s$ grams of salt. So the density of\
+  \ salt in the tank is $s / 100$ grams per liter. The flow of water out of the tank\
+  \ therefore subtracts $4(s / 100)=s / 25$ grams of salt per minute. Then, for all\
+  \ $t \\in \\mathbb{R}$, we have $s^{\\prime}(t)=(2 / 25)-(s / 25)=(2-s) / 25$, and\
+  \ so $[s(t)=2] \\Rightarrow\\left[s^{\\prime}(t)=0\\right]$. For all $t \\in \\mathbb{R}$,\n\
+  $$\n\\frac{d}{d t}[\\ln (s-2)]=\\frac{s^{\\prime}}{s-2}=\\frac{-1}{25}=\\frac{d}{d t}\\\
+  left[-\\frac{t}{25}\\right] .\n$$\nChoose $C \\in \\mathbb{R}$ such that, for all\
+  \ $t \\in \\mathbb{R}, \\ln ((s(t)-2))=-[t / 25]+C$. Let $K:=e^{C}$. Then, for all\
+  \ $t \\in \\mathbb{R}$, we have $(s(t))-2=K e^{-t / 25}$, and so $s(t)=2+K e^{-t\
+  \ / 25}$. Then $3=s(0)=2+K e^{0}=2+K$, so $K=1$. Then $s(100)=2+K e^{-100 / 25}=2+1\
+  \ \\cdot e^{-4}=2+e^{-4}$. The answer is (D).\n\n"
+"group": "mmlu_flan_cot_fewshot_stem"
+"include": "_mmlu_flan_cot_fewshot_template_yaml"
+"task": "mmlu_flan_cot_fewshot_college_mathematics"