Add files using upload-large-folder tool
This view is limited to 50 files because it contains too many changes; see the raw diff for the complete change set.
- ckpts/universal/global_step40/zero/10.attention.query_key_value.weight/exp_avg_sq.pt +3 -0
- ckpts/universal/global_step40/zero/23.attention.query_key_value.weight/exp_avg.pt +3 -0
- lm-evaluation-harness/tests/testdata/anagrams2-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/anli_r2-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/arithmetic_3ds-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/arithmetic_5da-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/blimp_animate_subject_passive-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/blimp_animate_subject_passive-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/blimp_coordinate_structure_constraint_complex_left_branch-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/blimp_inchoative-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/blimp_intransitive-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/blimp_npi_present_1-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/blimp_only_npi_scope-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/blimp_principle_A_case_2-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/blimp_transitive-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/blimp_wh_vs_that_no_gap-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/cb-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/crows_pairs_english_nationality-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/crows_pairs_english_socioeconomic-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/crows_pairs_english_socioeconomic-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/crows_pairs_french_nationality-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/crows_pairs_french_religion-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/cycle_letters-v0-greedy_until +1 -0
- lm-evaluation-harness/tests/testdata/cycle_letters-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/ethics_cm-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/gsm8k-v0-greedy_until +1 -0
- lm-evaluation-harness/tests/testdata/headqa-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/hendrycksTest-anatomy-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/hendrycksTest-clinical_knowledge-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/hendrycksTest-college_chemistry-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/hendrycksTest-college_computer_science-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/hendrycksTest-college_physics-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/hendrycksTest-conceptual_physics-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/hendrycksTest-elementary_mathematics-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/hendrycksTest-high_school_computer_science-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/hendrycksTest-high_school_government_and_politics-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/hendrycksTest-high_school_statistics-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/hendrycksTest-high_school_statistics-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/hendrycksTest-nutrition-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/hendrycksTest-professional_law-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/math_num_theory-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/math_prealgebra-v0-greedy_until +1 -0
- lm-evaluation-harness/tests/testdata/math_precalc-v1-greedy_until +1 -0
- lm-evaluation-harness/tests/testdata/mutual-v1-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/pile_bookcorpus2-v0-loglikelihood_rolling +1 -0
- lm-evaluation-harness/tests/testdata/pile_dm-mathematics-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/pile_dm-mathematics-v1-res.json +1 -0
- lm-evaluation-harness/tests/testdata/pile_enron-v0-loglikelihood_rolling +1 -0
- lm-evaluation-harness/tests/testdata/pile_github-v0-loglikelihood_rolling +1 -0
- lm-evaluation-harness/tests/testdata/pile_hackernews-v0-loglikelihood_rolling +1 -0
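The testdata filenames above follow a consistent `<task>-v<version>-<kind>` pattern, where `<kind>` is either `res.json` (expected metrics) or a request type (`loglikelihood`, `loglikelihood_rolling`, `greedy_until`) whose fixture holds what appears to be a sha256 checksum. A hedged illustration of parsing that convention (the regex is inferred from the filenames, not taken from harness code):

```python
# Illustrative parser for the fixture naming convention seen above;
# the pattern is inferred from the filenames, not harness code.
import re

FIXTURE_RE = re.compile(
    r"^(?P<task>.+)-v(?P<version>\d+)-"
    r"(?P<kind>res\.json|loglikelihood_rolling|loglikelihood|greedy_until)$"
)

m = FIXTURE_RE.match("blimp_inchoative-v0-loglikelihood")
assert m is not None
assert m.group("task") == "blimp_inchoative"
assert m.group("version") == "0"
assert m.group("kind") == "loglikelihood"
```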
ckpts/universal/global_step40/zero/10.attention.query_key_value.weight/exp_avg_sq.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:08e3ab68253e99ba15290a82f87aa24013966f5d984803b9735b04cecf70a139
+size 50332843
ckpts/universal/global_step40/zero/23.attention.query_key_value.weight/exp_avg.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:09eaa9225d4401ad0dd15f32d30d2931a2799a9d16ceb636430828c447f8e17e
+size 50332828
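Both checkpoint entries above are Git LFS pointer files rather than the tensors themselves: the diff records only the three-line pointer (spec version, sha256 object ID, byte size), while the ~50 MB payloads live in LFS storage. A minimal sketch of reading such a pointer, assuming the repo was cloned without `git lfs pull`:

```python
# Minimal sketch: parse the three-line Git LFS pointer format shown above.
def parse_lfs_pointer(path: str) -> dict:
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

ptr = parse_lfs_pointer(
    "ckpts/universal/global_step40/zero/10.attention.query_key_value.weight/exp_avg_sq.pt"
)
print(ptr["oid"])   # sha256:08e3ab68... (matches the diff above)
print(ptr["size"])  # 50332843 bytes for the actual tensor file
```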
lm-evaluation-harness/tests/testdata/anagrams2-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"anagrams2": {"acc": 0.0, "acc_stderr": 0.0}}, "versions": {"anagrams2": 0}}
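Each `*-res.json` fixture pins the expected metrics for one task. A hedged sketch of how a regression test could consume such a file (the helper name and tolerance are assumptions, not the harness's actual API):

```python
# Hedged sketch: compare freshly computed metrics against a *-res.json fixture.
import json

def check_fixture(fixture_path: str, computed: dict, tol: float = 1e-9) -> None:
    with open(fixture_path) as f:
        expected = json.load(f)
    for task, metrics in expected["results"].items():
        for metric, value in metrics.items():
            assert abs(computed[task][metric] - value) <= tol, (task, metric)

# Usage (hypothetical): check_fixture("tests/testdata/anagrams2-v0-res.json", results)
```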
lm-evaluation-harness/tests/testdata/anli_r2-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"anli_r2": {"acc": 0.356, "acc_stderr": 0.015149042659306628}}, "versions": {"anli_r2": 0}}
lm-evaluation-harness/tests/testdata/arithmetic_3ds-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"arithmetic_3ds": {"acc": 0.0, "acc_stderr": 0.0}}, "versions": {"arithmetic_3ds": 0}}
lm-evaluation-harness/tests/testdata/arithmetic_5da-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"arithmetic_5da": {"acc": 0.0, "acc_stderr": 0.0}}, "versions": {"arithmetic_5da": 0}}
lm-evaluation-harness/tests/testdata/blimp_animate_subject_passive-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+064c38fcd072b8bd12f54ea4f8e41599ed4e11dc386e93b77e1fc07967d1f960
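The extension-less fixtures such as this one hold a single 64-character hex string, which looks like a sha256 digest of the task's serialized requests or responses; storing a hash keeps each fixture at one line regardless of payload size. A sketch of the comparison, with the serialization scheme assumed for illustration:

```python
# Hedged sketch: hash a list of payloads and compare with the stored digest.
# Joining repr() of each item is an assumed serialization, not harness code.
import hashlib

def digest(payloads: list) -> str:
    h = hashlib.sha256()
    for p in payloads:
        h.update(repr(p).encode("utf-8"))
    return h.hexdigest()

expected = open(
    "tests/testdata/blimp_animate_subject_passive-v0-loglikelihood"
).read().strip()
# assert digest(task_payloads) == expected  # task_payloads from the run under test
```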
lm-evaluation-harness/tests/testdata/blimp_animate_subject_passive-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_animate_subject_passive": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_animate_subject_passive": 0}}
lm-evaluation-harness/tests/testdata/blimp_coordinate_structure_constraint_complex_left_branch-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_coordinate_structure_constraint_complex_left_branch": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_coordinate_structure_constraint_complex_left_branch": 0}}
lm-evaluation-harness/tests/testdata/blimp_inchoative-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+3ff73629fb4473986a0e8ae2fcb7c40e88292189ab0d8755d20836c5aa5a2f99
lm-evaluation-harness/tests/testdata/blimp_intransitive-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+6469ae3b0d46b008846b5fd132f2d2b26ea2858745d056df1470b89aa97a790f
lm-evaluation-harness/tests/testdata/blimp_npi_present_1-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_npi_present_1": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_npi_present_1": 0}}
lm-evaluation-harness/tests/testdata/blimp_only_npi_scope-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+fc0be817478c212327050fa297ef61ad214f4847dbff61d4e0fe7914c06a1691
lm-evaluation-harness/tests/testdata/blimp_principle_A_case_2-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_principle_A_case_2": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_principle_A_case_2": 0}}
lm-evaluation-harness/tests/testdata/blimp_transitive-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_transitive": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_transitive": 0}}
lm-evaluation-harness/tests/testdata/blimp_wh_vs_that_no_gap-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_wh_vs_that_no_gap": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_wh_vs_that_no_gap": 0}}
lm-evaluation-harness/tests/testdata/cb-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+ec3b1bbb9561e39c43c6f77a23b4060b15c606141c5346e3d0791b3e92aaa5d0
lm-evaluation-harness/tests/testdata/crows_pairs_english_nationality-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"crows_pairs_english_nationality": {"likelihood_difference": 0.3383027778174895, "likelihood_difference_stderr": 0.015957585374543233, "pct_stereotype": 0.4675925925925926, "pct_stereotype_stderr": 0.03402801581358966}}, "versions": {"crows_pairs_english_nationality": 0}}
lm-evaluation-harness/tests/testdata/crows_pairs_english_socioeconomic-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+c309eabfd247a702e32efc4e08211f9a72693d38995be5dd444d497b476396bd
lm-evaluation-harness/tests/testdata/crows_pairs_english_socioeconomic-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"crows_pairs_english_socioeconomic": {"likelihood_difference": 0.3424577735757881, "likelihood_difference_stderr": 0.017459994170011896, "pct_stereotype": 0.46842105263157896, "pct_stereotype_stderr": 0.036297038088316094}}, "versions": {"crows_pairs_english_socioeconomic": 0}}
lm-evaluation-harness/tests/testdata/crows_pairs_french_nationality-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+146eb60c8796fe3f25307a6776337f0b077b58ce02edec64c99df4b906c19b9f
lm-evaluation-harness/tests/testdata/crows_pairs_french_religion-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+8af6445eeb634dad5f0723e40615afe993e1e3f129a4f314fe4117e633c2efd3
lm-evaluation-harness/tests/testdata/cycle_letters-v0-greedy_until
ADDED
@@ -0,0 +1 @@
+eb23f7d5de7528eefd8ed5f8054c402ff947319cccfef7195995946f99389201
lm-evaluation-harness/tests/testdata/cycle_letters-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"cycle_letters": {"acc": 0.0, "acc_stderr": 0.0}}, "versions": {"cycle_letters": 0}}
lm-evaluation-harness/tests/testdata/ethics_cm-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+92d136ebb2bd86cd036e61699ad9a1417dbb48651f0a3afa5045cf57cef5a3f6
lm-evaluation-harness/tests/testdata/gsm8k-v0-greedy_until
ADDED
@@ -0,0 +1 @@
+e7292dbdd7fd8419ba954f2e0701e04c8d0e8842fe053dbf2fe47d926630e35e
lm-evaluation-harness/tests/testdata/headqa-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"headqa": {"acc": 0.23559445660102116, "acc_norm": 0.25018234865062, "acc_norm_stderr": 0.008272783230806014, "acc_stderr": 0.008105688874297972}}, "versions": {"headqa": 0}}
lm-evaluation-harness/tests/testdata/hendrycksTest-anatomy-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-anatomy": {"acc": 0.2222222222222222, "acc_norm": 0.23703703703703705, "acc_norm_stderr": 0.03673731683969506, "acc_stderr": 0.0359144408419697}}, "versions": {"hendrycksTest-anatomy": 0}}
lm-evaluation-harness/tests/testdata/hendrycksTest-clinical_knowledge-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-clinical_knowledge": {"acc": 0.23773584905660378, "acc_norm": 0.27169811320754716, "acc_norm_stderr": 0.027377706624670713, "acc_stderr": 0.02619980880756191}}, "versions": {"hendrycksTest-clinical_knowledge": 0}}
lm-evaluation-harness/tests/testdata/hendrycksTest-college_chemistry-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-college_chemistry": {"acc": 0.28, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768078, "acc_stderr": 0.04512608598542127}}, "versions": {"hendrycksTest-college_chemistry": 0}}
lm-evaluation-harness/tests/testdata/hendrycksTest-college_computer_science-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-college_computer_science": {"acc": 0.22, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909282, "acc_stderr": 0.041633319989322695}}, "versions": {"hendrycksTest-college_computer_science": 0}}
lm-evaluation-harness/tests/testdata/hendrycksTest-college_physics-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+704a7671ef981fb95594782bc446dd632e87ebdbe89436a0603b714fb5786c75
lm-evaluation-harness/tests/testdata/hendrycksTest-conceptual_physics-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+622f191ccfc7a597d99f39897ebe3f95a9ddce0e662fcfb411aa554b289bb355
lm-evaluation-harness/tests/testdata/hendrycksTest-elementary_mathematics-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-elementary_mathematics": {"acc": 0.2724867724867725, "acc_norm": 0.2830687830687831, "acc_norm_stderr": 0.023201392938194978, "acc_stderr": 0.022930973071633345}}, "versions": {"hendrycksTest-elementary_mathematics": 0}}
lm-evaluation-harness/tests/testdata/hendrycksTest-high_school_computer_science-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-high_school_computer_science": {"acc": 0.2, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932269, "acc_stderr": 0.04020151261036845}}, "versions": {"hendrycksTest-high_school_computer_science": 0}}
lm-evaluation-harness/tests/testdata/hendrycksTest-high_school_government_and_politics-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+11f40d8f48ba5cd739e21d54c3c04d3761f81df5cb7ddd77df868d24ced44b49
lm-evaluation-harness/tests/testdata/hendrycksTest-high_school_statistics-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+33d1d6eaaa2c3a944bf49d3f220a4efc328d7c3b3465b7cec40ae36d8984b75f
lm-evaluation-harness/tests/testdata/hendrycksTest-high_school_statistics-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-high_school_statistics": {"acc": 0.2962962962962963, "acc_norm": 0.3055555555555556, "acc_norm_stderr": 0.03141554629402544, "acc_stderr": 0.03114144782353604}}, "versions": {"hendrycksTest-high_school_statistics": 0}}
lm-evaluation-harness/tests/testdata/hendrycksTest-nutrition-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-nutrition": {"acc": 0.24509803921568626, "acc_norm": 0.28104575163398693, "acc_norm_stderr": 0.025738854797818723, "acc_stderr": 0.02463004897982476}}, "versions": {"hendrycksTest-nutrition": 0}}
lm-evaluation-harness/tests/testdata/hendrycksTest-professional_law-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-professional_law": {"acc": 0.2561929595827901, "acc_norm": 0.2470664928292047, "acc_norm_stderr": 0.011015752255279352, "acc_stderr": 0.011149173153110582}}, "versions": {"hendrycksTest-professional_law": 0}}
lm-evaluation-harness/tests/testdata/math_num_theory-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"math_num_theory": {"acc": 0.0, "acc_stderr": 0.0}}, "versions": {"math_num_theory": 0}}
lm-evaluation-harness/tests/testdata/math_prealgebra-v0-greedy_until
ADDED
@@ -0,0 +1 @@
+752cdf343d7152e476b0273065024f6ea0e0f47ea385c6bdf9067736cb39724a
lm-evaluation-harness/tests/testdata/math_precalc-v1-greedy_until
ADDED
@@ -0,0 +1 @@
+bc834b06fd79473ca6fe38a51b714aad0bf0478c1b0eec787eca34dbdf69cb71
lm-evaluation-harness/tests/testdata/mutual-v1-loglikelihood
ADDED
@@ -0,0 +1 @@
+f759213a28f0412510bf1a24c9cab0dae64bdee902d42a26225295445e7779db
lm-evaluation-harness/tests/testdata/pile_bookcorpus2-v0-loglikelihood_rolling
ADDED
@@ -0,0 +1 @@
+5c17ddfebeab8c41dabadb6fc216ceda91e3fe5dc95aaf1b2c843d7f11828b03
lm-evaluation-harness/tests/testdata/pile_dm-mathematics-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"pile_dm-mathematics": {"bits_per_byte": 6.176600873627999e-05, "byte_perplexity": 1.0000617679162955, "word_perplexity": 1.0002875035042451}}, "versions": {"pile_dm-mathematics": 0}}
lm-evaluation-harness/tests/testdata/pile_dm-mathematics-v1-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"pile_dm-mathematics": {"bits_per_byte": 8.910951449933553e-05, "byte_perplexity": 1.0000617679162955, "word_perplexity": 1.0002875035042451}}, "versions": {"pile_dm-mathematics": 1}}
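The v0 and v1 entries for pile_dm-mathematics report identical perplexities but different `bits_per_byte` values. The numbers are consistent with v0 having used the natural log (nats per byte) and v1 log base 2, since v1's value equals v0's divided by ln 2. A quick numeric check (an inference from the figures above, not from harness source):

```python
import math

byte_ppl = 1.0000617679162955           # same in both versions
print(math.log(byte_ppl))   # ~6.17660e-05 -> matches the v0 bits_per_byte
print(math.log2(byte_ppl))  # ~8.91095e-05 -> matches the v1 bits_per_byte
```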
lm-evaluation-harness/tests/testdata/pile_enron-v0-loglikelihood_rolling
ADDED
@@ -0,0 +1 @@
+4baa6ccdc9e3aa9921675ab4400d5e89d7b546b844a8ea28f6461d649066418a
lm-evaluation-harness/tests/testdata/pile_github-v0-loglikelihood_rolling
ADDED
@@ -0,0 +1 @@
+df384c3df3d8f53273e97127c5bb84c17e638acad7d6bc9c91f6dee96d43b639
lm-evaluation-harness/tests/testdata/pile_hackernews-v0-loglikelihood_rolling
ADDED
@@ -0,0 +1 @@
+ec1082ee5a5326e0d57aa4e73b634937140c1de9af95f154e8ab57b05d9b422b