Add files using upload-large-folder tool
This view is limited to 50 files because it contains too many changes.
- lm-evaluation/tests/testdata/anagrams1-v0-res.json +1 -0
- lm-evaluation/tests/testdata/anli_r3-v0-res.json +1 -0
- lm-evaluation/tests/testdata/arc_easy-v0-res.json +1 -0
- lm-evaluation/tests/testdata/arithmetic_2da-v0-res.json +1 -0
- lm-evaluation/tests/testdata/arithmetic_2dm-v0-res.json +1 -0
- lm-evaluation/tests/testdata/arithmetic_4ds-v0-res.json +1 -0
- lm-evaluation/tests/testdata/blimp_complex_NP_island-v0-res.json +1 -0
- lm-evaluation/tests/testdata/blimp_determiner_noun_agreement_1-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/blimp_determiner_noun_agreement_with_adj_2-v0-res.json +1 -0
- lm-evaluation/tests/testdata/blimp_determiner_noun_agreement_with_adj_irregular_1-v0-res.json +1 -0
- lm-evaluation/tests/testdata/blimp_matrix_question_npi_licensor_present-v0-res.json +1 -0
- lm-evaluation/tests/testdata/blimp_passive_1-v0-res.json +1 -0
- lm-evaluation/tests/testdata/blimp_principle_A_case_1-v0-res.json +1 -0
- lm-evaluation/tests/testdata/blimp_principle_A_reconstruction-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/blimp_wh_questions_subject_gap-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/blimp_wh_vs_that_no_gap_long_distance-v0-res.json +1 -0
- lm-evaluation/tests/testdata/boolq-v1-res.json +1 -0
- lm-evaluation/tests/testdata/crows_pairs_english_autre-v0-res.json +1 -0
- lm-evaluation/tests/testdata/crows_pairs_french_religion-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/drop-v1-greedy_until +1 -0
- lm-evaluation/tests/testdata/ethics_deontology-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/ethics_utilitarianism-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/hendrycksTest-abstract_algebra-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/hendrycksTest-astronomy-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/hendrycksTest-college_chemistry-v0-res.json +1 -0
- lm-evaluation/tests/testdata/hendrycksTest-computer_security-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/hendrycksTest-computer_security-v0-res.json +1 -0
- lm-evaluation/tests/testdata/hendrycksTest-conceptual_physics-v0-res.json +1 -0
- lm-evaluation/tests/testdata/hendrycksTest-econometrics-v0-res.json +1 -0
- lm-evaluation/tests/testdata/hendrycksTest-high_school_biology-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/hendrycksTest-high_school_physics-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/hendrycksTest-high_school_statistics-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/hendrycksTest-high_school_us_history-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/hendrycksTest-human_aging-v0-res.json +1 -0
- lm-evaluation/tests/testdata/hendrycksTest-international_law-v0-res.json +1 -0
- lm-evaluation/tests/testdata/hendrycksTest-marketing-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/hendrycksTest-sociology-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/lambada-v0-res.json +1 -0
- lm-evaluation/tests/testdata/lambada_mt_fr-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/lambada_openai-v0-res.json +1 -0
- lm-evaluation/tests/testdata/math_intermediate_algebra-v1-res.json +1 -0
- lm-evaluation/tests/testdata/math_num_theory-v1-res.json +1 -0
- lm-evaluation/tests/testdata/mathqa-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/mc_taco-v0-loglikelihood +1 -0
- lm-evaluation/tests/testdata/mnli_mismatched-v0-res.json +1 -0
- lm-evaluation/tests/testdata/multirc-v1-res.json +1 -0
- lm-evaluation/tests/testdata/mutual_plus-v1-res.json +1 -0
- lm-evaluation/tests/testdata/pile_arxiv-v1-res.json +1 -0
- lm-evaluation/tests/testdata/pile_bookcorpus2-v1-loglikelihood_rolling +1 -0
- lm-evaluation/tests/testdata/pile_books3-v0-res.json +1 -0
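For orientation (an inferred note, not part of the upstream diff): the `-res.json` fixtures store the expected per-task metrics and task version, while the extension-less fixtures such as `*-v0-loglikelihood` store a single SHA-256 hex digest of the serialized model responses for that request type. A minimal sketch of how a regression test could check such a fixture pair against a fresh run; the helper name `verify_fixture` and the serialization of `responses` are assumptions, not the harness's actual API:

```python
import hashlib
import json

def verify_fixture(responses, hash_path: str, res_path: str, results: dict) -> bool:
    """Compare a fresh eval run against stored testdata fixtures (sketch).

    `responses`: JSON-serializable model outputs for one request type;
    the real harness may serialize these differently.
    """
    # Hash fixture: a single hex digest of the serialized responses.
    digest = hashlib.sha256(json.dumps(responses).encode("utf-8")).hexdigest()
    with open(hash_path) as f:
        if f.read().strip() != digest:
            return False

    # Result fixture: expected metrics, e.g. {"results": {...}, "versions": {...}}.
    with open(res_path) as f:
        expected = json.load(f)
    return expected["results"] == results
```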
lm-evaluation/tests/testdata/anagrams1-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"anagrams1": {"acc": 0.0, "acc_stderr": 0.0}}, "versions": {"anagrams1": 0}}
lm-evaluation/tests/testdata/anli_r3-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"anli_r3": {"acc": 0.31916666666666665, "acc_stderr": 0.01346230971200514}}, "versions": {"anli_r3": 0}}
lm-evaluation/tests/testdata/arc_easy-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"arc_easy": {"acc": 0.2474747474747475, "acc_norm": 0.24074074074074073, "acc_norm_stderr": 0.008772796145221907, "acc_stderr": 0.008855114414834707}}, "versions": {"arc_easy": 0}}
lm-evaluation/tests/testdata/arithmetic_2da-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"arithmetic_2da": {"acc": 0.0, "acc_stderr": 0.0}}, "versions": {"arithmetic_2da": 0}}
lm-evaluation/tests/testdata/arithmetic_2dm-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"arithmetic_2dm": {"acc": 0.0, "acc_stderr": 0.0}}, "versions": {"arithmetic_2dm": 0}}
lm-evaluation/tests/testdata/arithmetic_4ds-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"arithmetic_4ds": {"acc": 0.0, "acc_stderr": 0.0}}, "versions": {"arithmetic_4ds": 0}}
lm-evaluation/tests/testdata/blimp_complex_NP_island-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_complex_NP_island": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_complex_NP_island": 0}}
lm-evaluation/tests/testdata/blimp_determiner_noun_agreement_1-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+2df8cc7f17089f7e8c7d974dcb324c809d30ef059a5be22aed6b69f44230809f
lm-evaluation/tests/testdata/blimp_determiner_noun_agreement_with_adj_2-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_determiner_noun_agreement_with_adj_2": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_determiner_noun_agreement_with_adj_2": 0}}
lm-evaluation/tests/testdata/blimp_determiner_noun_agreement_with_adj_irregular_1-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_determiner_noun_agreement_with_adj_irregular_1": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_determiner_noun_agreement_with_adj_irregular_1": 0}}
lm-evaluation/tests/testdata/blimp_matrix_question_npi_licensor_present-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_matrix_question_npi_licensor_present": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_matrix_question_npi_licensor_present": 0}}
lm-evaluation/tests/testdata/blimp_passive_1-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_passive_1": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_passive_1": 0}}
lm-evaluation/tests/testdata/blimp_principle_A_case_1-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_principle_A_case_1": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_principle_A_case_1": 0}}
lm-evaluation/tests/testdata/blimp_principle_A_reconstruction-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+894efedfd8750d5b8de6157f9b2ed2b51b5290d3a78ea9b041fc62d34e96efbc
lm-evaluation/tests/testdata/blimp_wh_questions_subject_gap-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+d5486ffcc075cad4302e37ece9bbf5b2063c0b5a48e76c8e1dd365e22a5a48fc
lm-evaluation/tests/testdata/blimp_wh_vs_that_no_gap_long_distance-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_wh_vs_that_no_gap_long_distance": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_wh_vs_that_no_gap_long_distance": 0}}
lm-evaluation/tests/testdata/boolq-v1-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"boolq": {"acc": 0.5048929663608562, "acc_stderr": 0.00874463623355505}}, "versions": {"boolq": 1}}
lm-evaluation/tests/testdata/crows_pairs_english_autre-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"crows_pairs_english_autre": {"likelihood_difference": 0.3424336593343321, "likelihood_difference_stderr": 0.08588068996335849, "pct_stereotype": 0.2727272727272727, "pct_stereotype_stderr": 0.14083575804390605}}, "versions": {"crows_pairs_english_autre": 0}}
lm-evaluation/tests/testdata/crows_pairs_french_religion-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+8af6445eeb634dad5f0723e40615afe993e1e3f129a4f314fe4117e633c2efd3
lm-evaluation/tests/testdata/drop-v1-greedy_until
ADDED
@@ -0,0 +1 @@
+a670f911ab2999d72db15f534b22703d19e7837edbda4f9f199ad587f7aae6b2
lm-evaluation/tests/testdata/ethics_deontology-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+74ecebe322457d70afc16fde848978410a09b854dc65c47f428d100bd1593248
lm-evaluation/tests/testdata/ethics_utilitarianism-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+88872f1ed1b203f9649a4ced4fb4627d18c17af455d713de6e17c05eced4ec60
lm-evaluation/tests/testdata/hendrycksTest-abstract_algebra-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+e35d1eeb356ac1084d4e9773f028cb3c81ba1c6e5574d598ac4a78aa467cd797
lm-evaluation/tests/testdata/hendrycksTest-astronomy-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+bed1e47127cc2893c6aef63b9a0909cca31aa351a703da2a166b01cae03c3311
lm-evaluation/tests/testdata/hendrycksTest-college_chemistry-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-college_chemistry": {"acc": 0.28, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768078, "acc_stderr": 0.04512608598542127}}, "versions": {"hendrycksTest-college_chemistry": 0}}
lm-evaluation/tests/testdata/hendrycksTest-computer_security-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+a8a1892d1906cc3e7ffd321043f0a60f3b8b69ef76e5c6ff03c6ea41dc87d0cb
lm-evaluation/tests/testdata/hendrycksTest-computer_security-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-computer_security": {"acc": 0.24, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394, "acc_stderr": 0.042923469599092816}}, "versions": {"hendrycksTest-computer_security": 0}}
lm-evaluation/tests/testdata/hendrycksTest-conceptual_physics-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-conceptual_physics": {"acc": 0.2680851063829787, "acc_norm": 0.2553191489361702, "acc_norm_stderr": 0.028504856470514185, "acc_stderr": 0.028957342788342347}}, "versions": {"hendrycksTest-conceptual_physics": 0}}
lm-evaluation/tests/testdata/hendrycksTest-econometrics-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-econometrics": {"acc": 0.24561403508771928, "acc_norm": 0.24561403508771928, "acc_norm_stderr": 0.04049339297748142, "acc_stderr": 0.040493392977481425}}, "versions": {"hendrycksTest-econometrics": 0}}
lm-evaluation/tests/testdata/hendrycksTest-high_school_biology-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+d4dc051f37a49dc75c218741e87bc826fd44f31ee1309b55e0f33bd191c1bc78
lm-evaluation/tests/testdata/hendrycksTest-high_school_physics-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+dae59e82d3d4d8dec82239d9620b72cc47bb6efbe2f1c2f9b9d23e849c9c5e32
lm-evaluation/tests/testdata/hendrycksTest-high_school_statistics-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+33d1d6eaaa2c3a944bf49d3f220a4efc328d7c3b3465b7cec40ae36d8984b75f
lm-evaluation/tests/testdata/hendrycksTest-high_school_us_history-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+8c65c1a28330dd001d395ac11f1bb80c3b33f5935f503e74067aef6e9e1d9d9b
lm-evaluation/tests/testdata/hendrycksTest-human_aging-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-human_aging": {"acc": 0.21524663677130046, "acc_norm": 0.17937219730941703, "acc_norm_stderr": 0.025749819569192804, "acc_stderr": 0.02758406660220827}}, "versions": {"hendrycksTest-human_aging": 0}}
lm-evaluation/tests/testdata/hendrycksTest-international_law-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-international_law": {"acc": 0.2396694214876033, "acc_norm": 0.3140495867768595, "acc_norm_stderr": 0.042369647530410164, "acc_stderr": 0.03896878985070417}}, "versions": {"hendrycksTest-international_law": 0}}
lm-evaluation/tests/testdata/hendrycksTest-marketing-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+b4fa0681fe54671a80509779d4338d744097a7206687f62977df7145dfa74a66
lm-evaluation/tests/testdata/hendrycksTest-sociology-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+f99a3caece11169f2a5cc951001f92027104afd25d29b2a399883bd4bf118605
lm-evaluation/tests/testdata/lambada-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"lambada": {"acc": 0.0, "acc_stderr": 0.0, "ppl": 1.6479047769869253, "ppl_stderr": 0.006497321146240192}}, "versions": {"lambada": 0}}
lm-evaluation/tests/testdata/lambada_mt_fr-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+5d16f4a0c51dc6d7b6df2ebeba2bbfa51e700b843779b559b3d90183d7b02a11
lm-evaluation/tests/testdata/lambada_openai-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"lambada_openai": {"acc": 0.0, "acc_stderr": 0.0, "ppl": 1.6479047769869253, "ppl_stderr": 0.006497321146240192}}, "versions": {"lambada_openai": 0}}
lm-evaluation/tests/testdata/math_intermediate_algebra-v1-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"math_intermediate_algebra": {"acc": 0.0, "acc_stderr": 0.0}}, "versions": {"math_intermediate_algebra": 1}}
lm-evaluation/tests/testdata/math_num_theory-v1-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"math_num_theory": {"acc": 0.0, "acc_stderr": 0.0}}, "versions": {"math_num_theory": 1}}
lm-evaluation/tests/testdata/mathqa-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+a45260e49f02c7cb8886b3746db4d388890860b202dd8a9f0267e3c324e0af13
lm-evaluation/tests/testdata/mc_taco-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+1811808ef05afd5f30ffc3471622a3dd7a1b681b17a2f7616695ad6b2a45943c
lm-evaluation/tests/testdata/mnli_mismatched-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"mnli_mismatched": {"acc": 0.3360455655004068, "acc_stderr": 0.004763973908606819}}, "versions": {"mnli_mismatched": 0}}
lm-evaluation/tests/testdata/multirc-v1-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"multirc": {"acc": 0.046169989506820566, "acc_stderr": 0.006801377886208738}}, "versions": {"multirc": 1}}
lm-evaluation/tests/testdata/mutual_plus-v1-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"mutual_plus": {"mrr": 0.5275583145221953, "mrr_stderr": 0.009940894824430708, "r@1": 0.26297968397291194, "r@1_stderr": 0.01479889176605113, "r@2": 0.5, "r@2_stderr": 0.01680731613632036}}, "versions": {"mutual_plus": 1}}
lm-evaluation/tests/testdata/pile_arxiv-v1-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"pile_arxiv": {"bits_per_byte": 1.55095665856779e-05, "byte_perplexity": 1.0000107504701365, "word_perplexity": 1.0000819333090385}}, "versions": {"pile_arxiv": 1}}
lm-evaluation/tests/testdata/pile_bookcorpus2-v1-loglikelihood_rolling
ADDED
@@ -0,0 +1 @@
+5c17ddfebeab8c41dabadb6fc216ceda91e3fe5dc95aaf1b2c843d7f11828b03
lm-evaluation/tests/testdata/pile_books3-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"pile_books3": {"bits_per_byte": 8.942486206275221e-07, "byte_perplexity": 1.0000008942490204, "word_perplexity": 1.0000052870063607}}, "versions": {"pile_books3": 0}}