Add files using upload-large-folder tool
This view is limited to 50 files because it contains too many changes.
- lm-evaluation/tests/testdata/anli_r1-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/arc_challenge-v2.0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/arithmetic_4da-v0-res.json +1 -0
- lm-evaluation/tests/testdata/arithmetic_5da-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/blimp_anaphor_number_agreement-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/blimp_animate_subject_passive-v0-res.json +1 -0
- lm-evaluation/tests/testdata/blimp_animate_subject_trans-v0-res.json +1 -0
- lm-evaluation/tests/testdata/blimp_causative-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/blimp_coordinate_structure_constraint_complex_left_branch-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/blimp_determiner_noun_agreement_1-v0-res.json +1 -0
- lm-evaluation/tests/testdata/blimp_determiner_noun_agreement_irregular_1-v0-res.json +1 -0
- lm-evaluation/tests/testdata/blimp_determiner_noun_agreement_with_adj_irregular_2-v0-res.json +1 -0
- lm-evaluation/tests/testdata/blimp_determiner_noun_agreement_with_adjective_1-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/blimp_distractor_agreement_relational_noun-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/blimp_distractor_agreement_relative_clause-v0-res.json +1 -0
- lm-evaluation/tests/testdata/blimp_existential_there_quantifiers_1-v0-res.json +1 -0
- lm-evaluation/tests/testdata/blimp_existential_there_subject_raising-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/blimp_expletive_it_object_raising-v0-res.json +1 -0
- lm-evaluation/tests/testdata/blimp_irregular_plural_subject_verb_agreement_1-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/blimp_regular_plural_subject_verb_agreement_1-v0-res.json +1 -0
- lm-evaluation/tests/testdata/blimp_sentential_subject_island-v0-res.json +1 -0
- lm-evaluation/tests/testdata/blimp_wh_questions_subject_gap_long_distance-v0-res.json +1 -0
- lm-evaluation/tests/testdata/blimp_wh_vs_that_with_gap-v0-res.json +1 -0
- lm-evaluation/tests/testdata/boolq-v1-loglikelihood +1 -0
- lm-evaluation/tests/testdata/coqa-v0-greedy_until +1 -0
- lm-evaluation/tests/testdata/crows_pairs_english_nationality-v0-res.json +1 -0
- lm-evaluation/tests/testdata/crows_pairs_english_physical_appearance-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/crows_pairs_french-v0-res.json +1 -0
- lm-evaluation/tests/testdata/crows_pairs_french_age-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/crows_pairs_french_age-v0-res.json +1 -0
- lm-evaluation/tests/testdata/crows_pairs_french_disability-v0-res.json +1 -0
- lm-evaluation/tests/testdata/crows_pairs_french_nationality-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/crows_pairs_french_sexual_orientation-v0-res.json +1 -0
- lm-evaluation/tests/testdata/headqa_es-v0-res.json +1 -0
- lm-evaluation/tests/testdata/hendrycksTest-business_ethics-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/hendrycksTest-college_chemistry-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/hendrycksTest-formal_logic-v0-res.json +1 -0
- lm-evaluation/tests/testdata/hendrycksTest-high_school_geography-v0-res.json +1 -0
- lm-evaluation/tests/testdata/hendrycksTest-high_school_mathematics-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/hendrycksTest-high_school_microeconomics-v0-res.json +1 -0
- lm-evaluation/tests/testdata/hendrycksTest-high_school_us_history-v0-res.json +1 -0
- lm-evaluation/tests/testdata/hendrycksTest-international_law-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/hendrycksTest-miscellaneous-v0-res.json +1 -0
- lm-evaluation/tests/testdata/hendrycksTest-public_relations-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/hendrycksTest-virology-v0-res.json +1 -0
- lm-evaluation/tests/testdata/hendrycksTest-world_religions-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/iwslt17-en-ar-v0-res.json +1 -0
- lm-evaluation/tests/testdata/lambada_mt_es-v0-res.json +1 -0
- lm-evaluation/tests/testdata/lambada_openai_mt_en-v0-res.json +1 -0
- lm-evaluation/tests/testdata/math_algebra-v1-res.json +1 -0
lm-evaluation/tests/testdata/anli_r1-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+3a84baf2f170e138c6ce0bc9f06f905def35d705fa2b8781f10c87aef404c4cb
lm-evaluation/tests/testdata/arc_challenge-v2.0-loglikelihood
ADDED
@@ -0,0 +1 @@
+8ebbbc510644ede7bf53496c381e276d5a1eec14828870e8b7e611f231e6d5f6
lm-evaluation/tests/testdata/arithmetic_4da-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"arithmetic_4da": {"acc": 0.0, "acc_stderr": 0.0}}, "versions": {"arithmetic_4da": 0}}
lm-evaluation/tests/testdata/arithmetic_5da-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+49edb1e735660631ea6cc309721e6c0b80b7106a613a6959514852ca48f1130e
lm-evaluation/tests/testdata/blimp_anaphor_number_agreement-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+0bdad31c974ba064e1f1ba931841ec2ba7461e8b0ca54ea5f79f08b6bae0bab5
lm-evaluation/tests/testdata/blimp_animate_subject_passive-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_animate_subject_passive": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_animate_subject_passive": 0}}
lm-evaluation/tests/testdata/blimp_animate_subject_trans-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_animate_subject_trans": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_animate_subject_trans": 0}}
lm-evaluation/tests/testdata/blimp_causative-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+3d67ad025185dbb0808ebd7f508edcb5750c18fc3c01ad91f20fda80780c916c
lm-evaluation/tests/testdata/blimp_coordinate_structure_constraint_complex_left_branch-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+7e1cc5b9f71abfbe56c4bdf343a1e5632785b66a986b8e904a41ed8f45a2c33e
lm-evaluation/tests/testdata/blimp_determiner_noun_agreement_1-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_determiner_noun_agreement_1": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_determiner_noun_agreement_1": 0}}
lm-evaluation/tests/testdata/blimp_determiner_noun_agreement_irregular_1-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_determiner_noun_agreement_irregular_1": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_determiner_noun_agreement_irregular_1": 0}}
lm-evaluation/tests/testdata/blimp_determiner_noun_agreement_with_adj_irregular_2-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_determiner_noun_agreement_with_adj_irregular_2": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_determiner_noun_agreement_with_adj_irregular_2": 0}}
lm-evaluation/tests/testdata/blimp_determiner_noun_agreement_with_adjective_1-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+007c47e5fbf88119c5180feef75e1345d448e56adcd4c7ab2d52fb8d67350d34
lm-evaluation/tests/testdata/blimp_distractor_agreement_relational_noun-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+8aab641bd5933f84f46a14f5c1208a3c855cace7e67b44abcd5aff8fec96717d
lm-evaluation/tests/testdata/blimp_distractor_agreement_relative_clause-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_distractor_agreement_relative_clause": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_distractor_agreement_relative_clause": 0}}
lm-evaluation/tests/testdata/blimp_existential_there_quantifiers_1-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_existential_there_quantifiers_1": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_existential_there_quantifiers_1": 0}}
lm-evaluation/tests/testdata/blimp_existential_there_subject_raising-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+9b324b28ae3e1b5d49ecf4b7b2a16c7bbc8ff38d000cf216fab75df633da2084
lm-evaluation/tests/testdata/blimp_expletive_it_object_raising-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_expletive_it_object_raising": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_expletive_it_object_raising": 0}}
lm-evaluation/tests/testdata/blimp_irregular_plural_subject_verb_agreement_1-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+7084358b1b7dd7fb5ead1a58f4b499d6f7610eca897bfac25a986d0f9a91aa5d
lm-evaluation/tests/testdata/blimp_regular_plural_subject_verb_agreement_1-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_regular_plural_subject_verb_agreement_1": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_regular_plural_subject_verb_agreement_1": 0}}
lm-evaluation/tests/testdata/blimp_sentential_subject_island-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_sentential_subject_island": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_sentential_subject_island": 0}}
lm-evaluation/tests/testdata/blimp_wh_questions_subject_gap_long_distance-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_wh_questions_subject_gap_long_distance": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_wh_questions_subject_gap_long_distance": 0}}
lm-evaluation/tests/testdata/blimp_wh_vs_that_with_gap-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_wh_vs_that_with_gap": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_wh_vs_that_with_gap": 0}}
lm-evaluation/tests/testdata/boolq-v1-loglikelihood
ADDED
@@ -0,0 +1 @@
+6577e0d88572772ef08e64f624c0e3df0953286ae1f118ccef15623b59ffeabf
lm-evaluation/tests/testdata/coqa-v0-greedy_until
ADDED
@@ -0,0 +1 @@
+4a8605d5deed0423ec095700251ed93325b45d320aca35d4ce1e94702094435e
lm-evaluation/tests/testdata/crows_pairs_english_nationality-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"crows_pairs_english_nationality": {"likelihood_difference": 0.3383027778174895, "likelihood_difference_stderr": 0.015957585374543233, "pct_stereotype": 0.4675925925925926, "pct_stereotype_stderr": 0.03402801581358966}}, "versions": {"crows_pairs_english_nationality": 0}}
lm-evaluation/tests/testdata/crows_pairs_english_physical_appearance-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+d1823f5038afafa7a5338e42531720480c8ccf4e177789526caf294d52d56e89
lm-evaluation/tests/testdata/crows_pairs_french-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"crows_pairs_french": {"likelihood_difference": 0.3367363060632734, "likelihood_difference_stderr": 0.005827747024053628, "pct_stereotype": 0.5062611806797853, "pct_stereotype_stderr": 0.012212341600228745}}, "versions": {"crows_pairs_french": 0}}
lm-evaluation/tests/testdata/crows_pairs_french_age-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+b14a5769f415a234abe89063a1b546aa4a990c84217e5d4a697874cd7f85af35
lm-evaluation/tests/testdata/crows_pairs_french_age-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"crows_pairs_french_age": {"likelihood_difference": 0.31896094607685194, "likelihood_difference_stderr": 0.024068391933540753, "pct_stereotype": 0.4444444444444444, "pct_stereotype_stderr": 0.05267171812666418}}, "versions": {"crows_pairs_french_age": 0}}
lm-evaluation/tests/testdata/crows_pairs_french_disability-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"crows_pairs_french_disability": {"likelihood_difference": 0.31387939561315326, "likelihood_difference_stderr": 0.027598132299657168, "pct_stereotype": 0.36363636363636365, "pct_stereotype_stderr": 0.05966637484671758}}, "versions": {"crows_pairs_french_disability": 0}}
lm-evaluation/tests/testdata/crows_pairs_french_nationality-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+146eb60c8796fe3f25307a6776337f0b077b58ce02edec64c99df4b906c19b9f
lm-evaluation/tests/testdata/crows_pairs_french_sexual_orientation-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"crows_pairs_french_sexual_orientation": {"likelihood_difference": 0.3160680928470684, "likelihood_difference_stderr": 0.02397758321605678, "pct_stereotype": 0.43956043956043955, "pct_stereotype_stderr": 0.05231815698566189}}, "versions": {"crows_pairs_french_sexual_orientation": 0}}
lm-evaluation/tests/testdata/headqa_es-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"headqa_es": {"acc": 0.23559445660102116, "acc_norm": 0.25018234865062, "acc_norm_stderr": 0.008272783230806014, "acc_stderr": 0.008105688874297972}}, "versions": {"headqa_es": 0}}
lm-evaluation/tests/testdata/hendrycksTest-business_ethics-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+b3b27e9dbad587377d3c8cab1072782de883e245da93a563bd8b3099017b1fc0
lm-evaluation/tests/testdata/hendrycksTest-college_chemistry-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+044752b21540db95118b8cbe7e75c4c9b8758e27df56543deaeadec7f749a28d
lm-evaluation/tests/testdata/hendrycksTest-formal_logic-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-formal_logic": {"acc": 0.25396825396825395, "acc_norm": 0.2698412698412698, "acc_norm_stderr": 0.03970158273235172, "acc_stderr": 0.03893259610604674}}, "versions": {"hendrycksTest-formal_logic": 0}}
lm-evaluation/tests/testdata/hendrycksTest-high_school_geography-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-high_school_geography": {"acc": 0.2474747474747475, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.03191178226713547, "acc_stderr": 0.03074630074212452}}, "versions": {"hendrycksTest-high_school_geography": 0}}
lm-evaluation/tests/testdata/hendrycksTest-high_school_mathematics-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+ab368d16fc4648ad27940f71abd266366663f51db612f732a0b9b0eea28de9f8
lm-evaluation/tests/testdata/hendrycksTest-high_school_microeconomics-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-high_school_microeconomics": {"acc": 0.24369747899159663, "acc_norm": 0.22268907563025211, "acc_norm_stderr": 0.027025433498882378, "acc_stderr": 0.027886828078380558}}, "versions": {"hendrycksTest-high_school_microeconomics": 0}}
lm-evaluation/tests/testdata/hendrycksTest-high_school_us_history-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-high_school_us_history": {"acc": 0.29901960784313725, "acc_norm": 0.28431372549019607, "acc_norm_stderr": 0.03166009679399814, "acc_stderr": 0.03213325717373618}}, "versions": {"hendrycksTest-high_school_us_history": 0}}
lm-evaluation/tests/testdata/hendrycksTest-international_law-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+ea9b2cefd27959db564168f6ad1169a5eaa012fc5a5d5b8faf9e34d94e335dc1
lm-evaluation/tests/testdata/hendrycksTest-miscellaneous-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-miscellaneous": {"acc": 0.23499361430395913, "acc_norm": 0.2515964240102171, "acc_norm_stderr": 0.015517322365529622, "acc_stderr": 0.015162024152278445}}, "versions": {"hendrycksTest-miscellaneous": 0}}
lm-evaluation/tests/testdata/hendrycksTest-public_relations-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+ab70f500cf24e876f6ae6bdc27525a1d6074fa9b6ea97770255d9fc2559b36ff
lm-evaluation/tests/testdata/hendrycksTest-virology-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-virology": {"acc": 0.27710843373493976, "acc_norm": 0.2710843373493976, "acc_norm_stderr": 0.03460579907553027, "acc_stderr": 0.034843315926805875}}, "versions": {"hendrycksTest-virology": 0}}
lm-evaluation/tests/testdata/hendrycksTest-world_religions-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+97a0f68ba30ea3a6ef1db1a2925c964b09ecc54455a0a930da083e52677815bd
lm-evaluation/tests/testdata/iwslt17-en-ar-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"iwslt17-en-ar": {"bleu": 0.0, "bleu_stderr": 0.0, "chrf": 0.0, "chrf_stderr": 0.0, "ter": 1.0, "ter_stderr": 0.0}}, "versions": {"iwslt17-en-ar": 0}}
lm-evaluation/tests/testdata/lambada_mt_es-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"lambada_mt_es": {"acc": 0.0, "acc_stderr": 0.0, "ppl": 1.6479047769869253, "ppl_stderr": 0.006497321146240192}}, "versions": {"lambada_mt_es": 0}}
lm-evaluation/tests/testdata/lambada_openai_mt_en-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"lambada_openai_mt_en": {"acc": 0.0, "acc_stderr": 0.0, "ppl": 1.6479047769869253, "ppl_stderr": 0.006497321146240192}}, "versions": {"lambada_openai_mt_en": 0}}
lm-evaluation/tests/testdata/math_algebra-v1-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"math_algebra": {"acc": 0.0, "acc_stderr": 0.0}}, "versions": {"math_algebra": 1}}
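For context: these fixtures follow the lm-evaluation-harness testdata convention, where a `<task>-v<N>-<request_type>` file stores a SHA-256 digest of the serialized requests for that task, and the matching `<task>-v<N>-res.json` stores the expected metric values. A minimal sketch of reading them back, assuming this repository layout (the helper names `stored_digest`, `digest_matches`, and `expected_results` are illustrative, not part of the harness):

```python
import hashlib
import json
from pathlib import Path

# Path as it appears in this diff; adjust to your checkout.
TESTDATA = Path("lm-evaluation/tests/testdata")

def stored_digest(task: str, version: int, request_type: str) -> str:
    """Read the recorded SHA-256 digest for a task's requests (hypothetical helper)."""
    return (TESTDATA / f"{task}-v{version}-{request_type}").read_text().strip()

def digest_matches(task: str, version: int, request_type: str, payload: bytes) -> bool:
    """Compare a freshly serialized request payload against the stored digest."""
    return hashlib.sha256(payload).hexdigest() == stored_digest(task, version, request_type)

def expected_results(task: str, version: int) -> dict:
    """Load the expected metrics recorded for a task."""
    return json.loads((TESTDATA / f"{task}-v{version}-res.json").read_text())

# e.g. expected_results("arithmetic_4da", 0)["results"]["arithmetic_4da"]["acc"]  # -> 0.0
```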