Add files using upload-large-folder tool
This view is limited to 50 files because the commit contains too many changes; see the raw diff for the full list.
- .gitattributes +3 -0
- ckpts/universal/global_step20/mp_rank_01_model_states.pt +3 -0
- ckpts/universal/global_step20/mp_rank_02_model_states.pt +3 -0
- ckpts/universal/global_step40/zero/11.attention.query_key_value.weight/exp_avg.pt +3 -0
- ckpts/universal/global_step40/zero/11.attention.query_key_value.weight/fp32.pt +3 -0
- lm-evaluation-harness/tests/testdata/anli_r1-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/arc_challenge-v2.0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/arithmetic_4ds-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/blimp_adjunct_island-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/blimp_animate_subject_trans-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/blimp_coordinate_structure_constraint_complex_left_branch-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/blimp_coordinate_structure_constraint_object_extraction-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/blimp_determiner_noun_agreement_2-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/blimp_determiner_noun_agreement_with_adj_irregular_2-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/blimp_existential_there_quantifiers_2-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/blimp_inchoative-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/blimp_principle_A_case_1-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/blimp_regular_plural_subject_verb_agreement_2-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/blimp_superlative_quantifiers_1-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/boolq-v1-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/cb-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/cola-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/crows_pairs_english-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/crows_pairs_english_disability-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/crows_pairs_english_physical_appearance-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/crows_pairs_english_race_color-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/crows_pairs_french-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/crows_pairs_french_autre-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/crows_pairs_french_physical_appearance-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/hendrycksTest-astronomy-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/hendrycksTest-clinical_knowledge-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/hendrycksTest-college_biology-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/hendrycksTest-college_physics-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/hendrycksTest-computer_security-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/hendrycksTest-formal_logic-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/hendrycksTest-high_school_european_history-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/hendrycksTest-high_school_physics-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/hendrycksTest-human_aging-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/hendrycksTest-management-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/hendrycksTest-miscellaneous-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/hendrycksTest-philosophy-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/hendrycksTest-security_studies-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/lambada-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/lambada_openai_mt_fr-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/lambada_standard_cloze-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/math_geometry-v1-greedy_until +1 -0
- lm-evaluation-harness/tests/testdata/math_prealgebra-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/mc_taco-v0-res.json +1 -0
- lm-evaluation-harness/tests/testdata/mnli-v0-loglikelihood +1 -0
- lm-evaluation-harness/tests/testdata/mnli_mismatched-v0-res.json +1 -0
.gitattributes
CHANGED
@@ -148,3 +148,6 @@ venv/lib/python3.10/site-packages/triton/third_party/cuda/bin/ptxas filter=lfs d
 venv/lib/python3.10/site-packages/torch/bin/protoc filter=lfs diff=lfs merge=lfs -text
 venv/lib/python3.10/site-packages/torch/bin/protoc-3.13.0.0 filter=lfs diff=lfs merge=lfs -text
 venv/lib/python3.10/site-packages/triton/third_party/cuda/bin/nvdisasm filter=lfs diff=lfs merge=lfs -text
+venv/lib/python3.10/site-packages/torch/lib/libtorch_python.so filter=lfs diff=lfs merge=lfs -text
+venv/lib/python3.10/site-packages/torch/lib/libcusparseLt-f80c68d1.so.0 filter=lfs diff=lfs merge=lfs -text
+venv/lib/python3.10/site-packages/pyarrow/libarrow_acero.so.1600 filter=lfs diff=lfs merge=lfs -text
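The three `+` lines route the newly added shared libraries through Git LFS: any path whose .gitattributes entry carries `filter=lfs diff=lfs merge=lfs -text` is committed as a small pointer file, with the real binary stored out-of-band. A minimal sketch of listing which entries are LFS-tracked (a naive parser for illustration only; real gitattributes matching also handles glob semantics, escaping, and precedence):

```python
# Naive .gitattributes scan: report path patterns whose attributes include
# "filter=lfs". Illustration only; ignores glob semantics and precedence.
from pathlib import Path

def lfs_tracked_patterns(gitattributes_path: str) -> list[str]:
    patterns = []
    for line in Path(gitattributes_path).read_text().splitlines():
        parts = line.split()
        if parts and "filter=lfs" in parts[1:]:
            patterns.append(parts[0])  # first token is the path pattern
    return patterns

print(lfs_tracked_patterns(".gitattributes"))
```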
ckpts/universal/global_step20/mp_rank_01_model_states.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3054d4175f039469ad55167090ec83f5389bbb93415dfb894c919cf1d0555cf3
+size 4230020
ckpts/universal/global_step20/mp_rank_02_model_states.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:28bc7d19f509edecc0170ba2ab6c7411a909cff777d8e91fdb2d92928e4d9dae
+size 4230020
ckpts/universal/global_step40/zero/11.attention.query_key_value.weight/exp_avg.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2002b66f9e3a20583660bc8a5d42d749a3e00b8bb7f4587d2a9c913a390d20eb
+size 50332828
ckpts/universal/global_step40/zero/11.attention.query_key_value.weight/fp32.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a57df360496dae10c25dd7c1428cd53a02ed910f6deb63a9a06f298b2a3f88b1
+size 50332749
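All four checkpoint files above are committed as Git LFS pointers: a spec version line, a sha256 object id, and the byte size of the real blob. A minimal sketch for parsing such a pointer and verifying a downloaded blob against it (the helper names are ours for illustration; this is not the official git-lfs client logic):

```python
import hashlib
from pathlib import Path

def parse_lfs_pointer(pointer_path: str) -> dict[str, str]:
    """Split a 'key value' pointer file into a dict (version, oid, size)."""
    fields = {}
    for line in Path(pointer_path).read_text().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

def blob_matches_pointer(pointer_path: str, blob_path: str) -> bool:
    """Check a blob's sha256 and byte size against its LFS pointer."""
    fields = parse_lfs_pointer(pointer_path)
    expected_oid = fields["oid"].split(":", 1)[1]  # strip "sha256:" prefix
    data = Path(blob_path).read_bytes()
    return (hashlib.sha256(data).hexdigest() == expected_oid
            and len(data) == int(fields["size"]))
```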
lm-evaluation-harness/tests/testdata/anli_r1-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+3a84baf2f170e138c6ce0bc9f06f905def35d705fa2b8781f10c87aef404c4cb
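The `*-loglikelihood` (and `*-greedy_until`) fixtures added below each contain a single hex sha256 digest, which the harness's test suite presumably compares against a digest of freshly serialized request/response data to catch drift. Assuming that convention, a generic check might look like:

```python
import hashlib

def matches_fixture(fixture_path: str, payload: bytes) -> bool:
    """Compare sha256(payload) with the one-line hex digest in the fixture.

    Assumption: the fixture holds exactly one hex digest, as in the
    testdata files added in this commit.
    """
    with open(fixture_path) as f:
        expected = f.read().strip()
    return hashlib.sha256(payload).hexdigest() == expected
```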
lm-evaluation-harness/tests/testdata/arc_challenge-v2.0-loglikelihood
ADDED
@@ -0,0 +1 @@
+8ebbbc510644ede7bf53496c381e276d5a1eec14828870e8b7e611f231e6d5f6
lm-evaluation-harness/tests/testdata/arithmetic_4ds-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+d915830b8621e66331383bb2ae4c60acebf008e2f94741092ef4c33ea5441037
lm-evaluation-harness/tests/testdata/blimp_adjunct_island-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+976a5cac4bdb724632eebd4cb9e522203ce3da8d5525288a597c86e80469f3f2
lm-evaluation-harness/tests/testdata/blimp_animate_subject_trans-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+2a84231e7b79f517427e57e2099c88fed3d60a7efab4ef9506e263b4091d5cfa
lm-evaluation-harness/tests/testdata/blimp_coordinate_structure_constraint_complex_left_branch-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+7e1cc5b9f71abfbe56c4bdf343a1e5632785b66a986b8e904a41ed8f45a2c33e
lm-evaluation-harness/tests/testdata/blimp_coordinate_structure_constraint_object_extraction-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+23ddafdff7b1ebe331b146e23b2c21aa109fe57aa1ce8ca201a0d239fcbdd166
lm-evaluation-harness/tests/testdata/blimp_determiner_noun_agreement_2-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_determiner_noun_agreement_2": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_determiner_noun_agreement_2": 0}}
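The `*-res.json` fixtures, in contrast to the hash files, store the expected metrics verbatim: a `results` object keyed by task name and a `versions` object recording the task version the numbers were produced with. Reading one back is straightforward (file and task names below are just the ones from this commit):

```python
import json

def load_res(path: str):
    """Read a res.json fixture: per-task metrics plus task versions."""
    with open(path) as f:
        blob = json.load(f)
    return blob["results"], blob["versions"]

results, versions = load_res("blimp_determiner_noun_agreement_2-v0-res.json")
task = "blimp_determiner_noun_agreement_2"
print(results[task]["acc"], results[task]["acc_stderr"])  # 0.485, 0.01581...
print(versions[task])  # 0
```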
lm-evaluation-harness/tests/testdata/blimp_determiner_noun_agreement_with_adj_irregular_2-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+ccc64b4d5e80c081d5161aae5828212ba49d277ca8c5a4281f181744727a6a99
lm-evaluation-harness/tests/testdata/blimp_existential_there_quantifiers_2-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+6e6add7baff4217f383425bef58288202018e041b24084edcaa5df8af08f820c
lm-evaluation-harness/tests/testdata/blimp_inchoative-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_inchoative": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_inchoative": 0}}
lm-evaluation-harness/tests/testdata/blimp_principle_A_case_1-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_principle_A_case_1": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_principle_A_case_1": 0}}
lm-evaluation-harness/tests/testdata/blimp_regular_plural_subject_verb_agreement_2-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_regular_plural_subject_verb_agreement_2": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_regular_plural_subject_verb_agreement_2": 0}}
lm-evaluation-harness/tests/testdata/blimp_superlative_quantifiers_1-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+8a01f6a5ea87a01c0c9b0c7b3bc4de4711bf0ff050976976651182b9ed34a0d4
lm-evaluation-harness/tests/testdata/boolq-v1-loglikelihood
ADDED
@@ -0,0 +1 @@
+6577e0d88572772ef08e64f624c0e3df0953286ae1f118ccef15623b59ffeabf
lm-evaluation-harness/tests/testdata/cb-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"cb": {"acc": 0.3392857142857143, "acc_stderr": 0.06384226561930825, "f1": 0.2819143819143819}}, "versions": {"cb": 0}}
lm-evaluation-harness/tests/testdata/cola-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"cola": {"mcc": -0.04538802810223175, "mcc_stderr": 0.023100371589225246}}, "versions": {"cola": 0}}
lm-evaluation-harness/tests/testdata/crows_pairs_english-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"crows_pairs_english": {"likelihood_difference": 0.3367363060632734, "likelihood_difference_stderr": 0.005827747024053628, "pct_stereotype": 0.5062611806797853, "pct_stereotype_stderr": 0.012212341600228745}}, "versions": {"crows_pairs_english": 0}}
lm-evaluation-harness/tests/testdata/crows_pairs_english_disability-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+90c1bcfdeec0ff51d891ee8cf00ae2a5ec61bab6739faea9865809b8ffed2cdb
lm-evaluation-harness/tests/testdata/crows_pairs_english_physical_appearance-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+d1823f5038afafa7a5338e42531720480c8ccf4e177789526caf294d52d56e89
lm-evaluation-harness/tests/testdata/crows_pairs_english_race_color-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+0a750596d77cd96502dc414ff699a399b1b91c2078adeec1d3dd982b3d591089
lm-evaluation-harness/tests/testdata/crows_pairs_french-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"crows_pairs_french": {"likelihood_difference": 0.3367363060632734, "likelihood_difference_stderr": 0.005827747024053628, "pct_stereotype": 0.5062611806797853, "pct_stereotype_stderr": 0.012212341600228745}}, "versions": {"crows_pairs_french": 0}}
lm-evaluation-harness/tests/testdata/crows_pairs_french_autre-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"crows_pairs_french_autre": {"likelihood_difference": 0.3517045997290783, "likelihood_difference_stderr": 0.07647821858130377, "pct_stereotype": 0.23076923076923078, "pct_stereotype_stderr": 0.12162606385262997}}, "versions": {"crows_pairs_french_autre": 0}}
lm-evaluation-harness/tests/testdata/crows_pairs_french_physical_appearance-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"crows_pairs_french_physical_appearance": {"likelihood_difference": 0.3221673223187262, "likelihood_difference_stderr": 0.026978346460100555, "pct_stereotype": 0.4027777777777778, "pct_stereotype_stderr": 0.05820650942569533}}, "versions": {"crows_pairs_french_physical_appearance": 0}}
lm-evaluation-harness/tests/testdata/hendrycksTest-astronomy-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-astronomy": {"acc": 0.2565789473684211, "acc_norm": 0.29605263157894735, "acc_norm_stderr": 0.03715062154998904, "acc_stderr": 0.0355418036802569}}, "versions": {"hendrycksTest-astronomy": 0}}
lm-evaluation-harness/tests/testdata/hendrycksTest-clinical_knowledge-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+fbcb7ce507e0675d811e71e10a67c8d05a6605e29036f46776e04a6588cefbda
lm-evaluation-harness/tests/testdata/hendrycksTest-college_biology-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-college_biology": {"acc": 0.24305555555555555, "acc_norm": 0.2361111111111111, "acc_norm_stderr": 0.03551446610810826, "acc_stderr": 0.03586879280080341}}, "versions": {"hendrycksTest-college_biology": 0}}
lm-evaluation-harness/tests/testdata/hendrycksTest-college_physics-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-college_physics": {"acc": 0.23529411764705882, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.04220773659171453, "acc_stderr": 0.04220773659171452}}, "versions": {"hendrycksTest-college_physics": 0}}
lm-evaluation-harness/tests/testdata/hendrycksTest-computer_security-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-computer_security": {"acc": 0.24, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394, "acc_stderr": 0.042923469599092816}}, "versions": {"hendrycksTest-computer_security": 0}}
lm-evaluation-harness/tests/testdata/hendrycksTest-formal_logic-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+c0d0f0c008a5f3faf2f6f4268d87bbc09c40bb66ae08cf38eea0bf2e519c5a59
lm-evaluation-harness/tests/testdata/hendrycksTest-high_school_european_history-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+d8070e113be9d420fef5578cb69c70df4ea5118f9b18553023fd9efd5ff0b7f4
lm-evaluation-harness/tests/testdata/hendrycksTest-high_school_physics-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+dae59e82d3d4d8dec82239d9620b72cc47bb6efbe2f1c2f9b9d23e849c9c5e32
lm-evaluation-harness/tests/testdata/hendrycksTest-human_aging-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+0880b3a78f8d7b17ffc612031427b9085367cf65dabe2a68c4b64e3171d17e88
lm-evaluation-harness/tests/testdata/hendrycksTest-management-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-management": {"acc": 0.24271844660194175, "acc_norm": 0.2621359223300971, "acc_norm_stderr": 0.043546310772605956, "acc_stderr": 0.04245022486384495}}, "versions": {"hendrycksTest-management": 0}}
lm-evaluation-harness/tests/testdata/hendrycksTest-miscellaneous-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-miscellaneous": {"acc": 0.23499361430395913, "acc_norm": 0.2515964240102171, "acc_norm_stderr": 0.015517322365529622, "acc_stderr": 0.015162024152278445}}, "versions": {"hendrycksTest-miscellaneous": 0}}
lm-evaluation-harness/tests/testdata/hendrycksTest-philosophy-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+a419204da36c2b7a70fa8909a3a804260cc3283c7e07917534dfb76216c77f46
lm-evaluation-harness/tests/testdata/hendrycksTest-security_studies-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+92dfffe2acf3278256486d3e1cf1edb5a739ad0a54c0f9c67695f7a411ed5f76
lm-evaluation-harness/tests/testdata/lambada-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+6829e6a8aa5922e6c92dd31403cc060f242dc0ede4a775e085a70da095ab2e20
lm-evaluation-harness/tests/testdata/lambada_openai_mt_fr-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+5d16f4a0c51dc6d7b6df2ebeba2bbfa51e700b843779b559b3d90183d7b02a11
lm-evaluation-harness/tests/testdata/lambada_standard_cloze-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+b604f00bc9f2a77ef41f8cfdb5a8509b3ae9266893b9e90abc665f5399ecba4e
lm-evaluation-harness/tests/testdata/math_geometry-v1-greedy_until
ADDED
@@ -0,0 +1 @@
+46bc4cb219b6903397da782699a684bdbb982c0c954ff82e6beeed5c84878f42
lm-evaluation-harness/tests/testdata/math_prealgebra-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"math_prealgebra": {"acc": 0.0, "acc_stderr": 0.0}}, "versions": {"math_prealgebra": 0}}
lm-evaluation-harness/tests/testdata/mc_taco-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"mc_taco": {"em": 0.07732732732732733, "f1": 0.41600515965511614}}, "versions": {"mc_taco": 0}}
lm-evaluation-harness/tests/testdata/mnli-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+4fc7b56b8f1e37e38f4a052b227baec2df914c898c3405d3e994726ba4fba976
lm-evaluation-harness/tests/testdata/mnli_mismatched-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"mnli_mismatched": {"acc": 0.3360455655004068, "acc_stderr": 0.004763973908606819}}, "versions": {"mnli_mismatched": 0}}