Add files using upload-large-folder tool
Note: this view is limited to 50 files because the commit contains too many changes; see the raw diff for the full list.
- llama13b_5M/checkpoints_zero_stage_2/global_step0/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt +3 -0
- llama13b_5M/checkpoints_zero_stage_2/global_step0/bf16_zero_pp_rank_0_mp_rank_01_optim_states.pt +3 -0
- llama13b_5M/checkpoints_zero_stage_2/global_step0/bf16_zero_pp_rank_0_mp_rank_03_optim_states.pt +3 -0
- llama13b_5M/checkpoints_zero_stage_2/global_step0/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt +3 -0
- llama13b_5M/checkpoints_zero_stage_2/global_step0/bf16_zero_pp_rank_1_mp_rank_01_optim_states.pt +3 -0
- llama13b_5M/checkpoints_zero_stage_2/global_step0/bf16_zero_pp_rank_1_mp_rank_02_optim_states.pt +3 -0
- llama13b_5M/checkpoints_zero_stage_2/global_step0/bf16_zero_pp_rank_1_mp_rank_03_optim_states.pt +3 -0
- llama13b_600M/26-04-2024-09:36:12/hf_ckpt/global_step1000/tokenizer.model +3 -0
- llama13b_600M/26-04-2024-09:36:12/hf_ckpt/global_step10000/pytorch_model.bin +3 -0
- llama13b_600M/26-04-2024-09:36:12/hf_ckpt/global_step2000/tokenizer.model +3 -0
- llama13b_600M/26-04-2024-09:36:12/hf_ckpt/global_step2500/pytorch_model.bin +3 -0
- llama13b_600M/26-04-2024-09:36:12/hf_ckpt/global_step3000/tokenizer.model +3 -0
- llama13b_600M/26-04-2024-09:36:12/hf_ckpt/global_step3500/tokenizer.model +3 -0
- llama13b_600M/26-04-2024-09:36:12/hf_ckpt/global_step4000/pytorch_model.bin +3 -0
- llama13b_600M/26-04-2024-09:36:12/hf_ckpt/global_step4500/pytorch_model.bin +3 -0
- llama13b_600M/26-04-2024-09:36:12/hf_ckpt/global_step500/tokenizer.model +3 -0
- llama13b_600M/26-04-2024-09:36:12/hf_ckpt/global_step5500/tokenizer.model +3 -0
- llama13b_600M/26-04-2024-09:36:12/hf_ckpt/global_step6000/tokenizer.model +3 -0
- llama13b_600M/26-04-2024-09:36:12/hf_ckpt/global_step6500/pytorch_model.bin +3 -0
- llama13b_600M/26-04-2024-09:36:12/tensorboard/events.out.tfevents.1714124183.peacock-3.60258.0 +3 -0
- llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/mp_rank_00_model_states.pt +3 -0
- llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/10.attention.dense.weight/exp_avg.pt +3 -0
- llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/10.attention.dense.weight/exp_avg_sq.pt +3 -0
- llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/10.attention.dense.weight/fp32.pt +3 -0
- llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/10.mlp.dense_4h_to_h.weight/exp_avg.pt +3 -0
- llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/10.mlp.dense_4h_to_h.weight/exp_avg_sq.pt +3 -0
- llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/10.mlp.dense_4h_to_h.weight/fp32.pt +3 -0
- llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/10.mlp.dense_h_to_4h.weight/exp_avg.pt +3 -0
- llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/10.mlp.dense_h_to_4h.weight/exp_avg_sq.pt +3 -0
- llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/10.mlp.dense_h_to_4h.weight/fp32.pt +3 -0
- llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/12.attention.query_key_value.weight/exp_avg.pt +3 -0
- llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/12.attention.query_key_value.weight/exp_avg_sq.pt +3 -0
- llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/12.attention.query_key_value.weight/fp32.pt +3 -0
- llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/12.post_attention_layernorm.weight/exp_avg.pt +3 -0
- llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/12.post_attention_layernorm.weight/exp_avg_sq.pt +3 -0
- llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/12.post_attention_layernorm.weight/fp32.pt +3 -0
- llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/13.attention.dense.weight/exp_avg.pt +3 -0
- llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/13.attention.dense.weight/exp_avg_sq.pt +3 -0
- llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/13.attention.dense.weight/fp32.pt +3 -0
- llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/13.post_attention_layernorm.weight/exp_avg.pt +3 -0
- llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/13.post_attention_layernorm.weight/exp_avg_sq.pt +3 -0
- llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/13.post_attention_layernorm.weight/fp32.pt +3 -0
- llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/14.attention.query_key_value.weight/exp_avg.pt +3 -0
- llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/14.attention.query_key_value.weight/exp_avg_sq.pt +3 -0
- llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/14.attention.query_key_value.weight/fp32.pt +3 -0
- llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/15.attention.dense.weight/exp_avg.pt +3 -0
- llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/15.attention.dense.weight/exp_avg_sq.pt +3 -0
- llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/15.attention.dense.weight/fp32.pt +3 -0
- llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/15.mlp.dense_h_to_4h.weight/exp_avg.pt +3 -0
- llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/15.mlp.dense_h_to_4h.weight/exp_avg_sq.pt +3 -0
llama13b_5M/checkpoints_zero_stage_2/global_step0/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7ca33ace5dd24acd3d1ba85ac55782c625e991eabf98e900ef504ad64ebb158e
+size 8387794764
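Each added file in this commit is tracked with Git LFS, so the diff shows only a three-line pointer (version, oid, size) rather than the checkpoint data itself. The sketch below is a hypothetical helper, not part of this repository, showing how such a pointer could be parsed and a downloaded object checked against the recorded sha256 and size.

```python
import hashlib
from pathlib import Path

def parse_lfs_pointer(pointer_path: str) -> dict:
    """Parse the key/value lines of a Git LFS pointer file (version, oid, size)."""
    fields = {}
    for line in Path(pointer_path).read_text().splitlines():
        key, _, value = line.partition(" ")
        if key and value:
            fields[key] = value
    return fields

def verify_lfs_object(pointer_path: str, object_path: str, chunk_size: int = 1 << 20) -> bool:
    """Verify a downloaded object against the oid and size recorded in its pointer."""
    fields = parse_lfs_pointer(pointer_path)
    expected_oid = fields["oid"].removeprefix("sha256:")
    expected_size = int(fields["size"])

    digest = hashlib.sha256()
    size = 0
    with open(object_path, "rb") as f:
        while chunk := f.read(chunk_size):
            digest.update(chunk)
            size += len(chunk)
    return digest.hexdigest() == expected_oid and size == expected_size
```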
llama13b_5M/checkpoints_zero_stage_2/global_step0/bf16_zero_pp_rank_0_mp_rank_01_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ffc0eb923c156990898bdbbb9962c133143bfb5af9a1a67953d5f21bb17fb992
+size 8387794764
llama13b_5M/checkpoints_zero_stage_2/global_step0/bf16_zero_pp_rank_0_mp_rank_03_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4cffc68d3c14b81ecde8c807fb9aa71935effb74201c78c3a61b4434426fe33d
+size 8387825740
llama13b_5M/checkpoints_zero_stage_2/global_step0/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:674b1b2ba341f68da990d172c68fa6b6475131c381c14fef0b97e67a0c827883
+size 8387794892
llama13b_5M/checkpoints_zero_stage_2/global_step0/bf16_zero_pp_rank_1_mp_rank_01_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:22374eafb548b3adfaa7bdc5ac6572bce5ce077b8de1619a1b4e639b7ef69053
+size 8387794892
llama13b_5M/checkpoints_zero_stage_2/global_step0/bf16_zero_pp_rank_1_mp_rank_02_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:31374324220b8be40a4607d8305e8f770e63ad501b4af666db7d6726e13c4d79
+size 8387825548
llama13b_5M/checkpoints_zero_stage_2/global_step0/bf16_zero_pp_rank_1_mp_rank_03_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b40234baa310796117fe7329a3d5440c1428e31ac78d4ebc9b09e892c9eb0af8
+size 8387825548
llama13b_600M/26-04-2024-09:36:12/hf_ckpt/global_step1000/tokenizer.model
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
llama13b_600M/26-04-2024-09:36:12/hf_ckpt/global_step10000/pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:126225f67d4f7da220d83a5cc4b58310ae7947c482d5d24798ddc4c3c07d28a8
+size 4851203286
llama13b_600M/26-04-2024-09:36:12/hf_ckpt/global_step2000/tokenizer.model
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
llama13b_600M/26-04-2024-09:36:12/hf_ckpt/global_step2500/pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9f8637791dabd79d4ffe0ca583fb07d0e2ee10964674e29b13ca2c1ab842e275
+size 4851203286
llama13b_600M/26-04-2024-09:36:12/hf_ckpt/global_step3000/tokenizer.model
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
llama13b_600M/26-04-2024-09:36:12/hf_ckpt/global_step3500/tokenizer.model
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
llama13b_600M/26-04-2024-09:36:12/hf_ckpt/global_step4000/pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a17c4d4509ef22483f1fc11dd03fc86a8e696365a4c4bd7a3bd39e85641cbb3f
+size 4851203286
llama13b_600M/26-04-2024-09:36:12/hf_ckpt/global_step4500/pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:24dee1b9abdcddc9006a784e46b45fbb7fd4e9e4e52cbe9748466ac41b854314
+size 4851203286
llama13b_600M/26-04-2024-09:36:12/hf_ckpt/global_step500/tokenizer.model
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
llama13b_600M/26-04-2024-09:36:12/hf_ckpt/global_step5500/tokenizer.model
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
llama13b_600M/26-04-2024-09:36:12/hf_ckpt/global_step6000/tokenizer.model
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
llama13b_600M/26-04-2024-09:36:12/hf_ckpt/global_step6500/pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a6b99645f870679a2fe4becf98e01099e20dc04efadec7575fad3e4bcd59e108
+size 4851203286
llama13b_600M/26-04-2024-09:36:12/tensorboard/events.out.tfevents.1714124183.peacock-3.60258.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7fa8974aa39e600e16d8863feff798c757b05f19dae43c36e4a4c116bd7eb6cb
+size 15015945
llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/mp_rank_00_model_states.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d4b5c4d7e143a9c695acb11c954e08a575f436b2aa159f4449bab96d765b71cd
+size 4234628
llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/10.attention.dense.weight/exp_avg.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d0bbc64ab226839f61582055110927458751fc68494efe85edf820a85a266ede
+size 16778460
llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/10.attention.dense.weight/exp_avg_sq.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:373d442b970a9459ee46e57974da2a48c2fbe32635713dfacf01cbb690894c52
+size 16778475
llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/10.attention.dense.weight/fp32.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:487d17973881947108b3d7e83ae6e090f58f29c46f5a75f2bd87847a8597d5e0
+size 16778381
llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/10.mlp.dense_4h_to_h.weight/exp_avg.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6d4159081ec4bb9f313edd9e9713d931b8031d63d19ebcb98c88657c73d3242a
+size 33555676
llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/10.mlp.dense_4h_to_h.weight/exp_avg_sq.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b755b9f30eb0693bb58c1e4e7f530c5969e932faf537f8c43173f63db6367019
+size 33555691
llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/10.mlp.dense_4h_to_h.weight/fp32.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:615a96da0de3f33b4742e07a6646893ba5aae802895ccd97c47bae88740735aa
+size 33555597
llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/10.mlp.dense_h_to_4h.weight/exp_avg.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:76b49e37fb7cafd1433664b6f148a0879a225199861ed79f5c1d6e6f986c0abf
+size 33555676
llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/10.mlp.dense_h_to_4h.weight/exp_avg_sq.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:db0ba232bd1eeb4793240c14d20920d68e6136480669482d19f3264da625e493
+size 33555691
llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/10.mlp.dense_h_to_4h.weight/fp32.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:696a942ea9c2be022c4c6a66ae0eea4b0e8c6bb740eb09a4f9552210b19bd049
+size 33555597
llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/12.attention.query_key_value.weight/exp_avg.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0b3c82b354419d4d2ddbe83e6c317b52dbda4a9eb6bfdd7d8b86ca1033838dc3
+size 50332892
llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/12.attention.query_key_value.weight/exp_avg_sq.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2baa9a858f7668285a97d1b3d2f8687b61c29dd1fbd386ef80689ba9c5b5dd3c
+size 50332907
llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/12.attention.query_key_value.weight/fp32.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6f89eb907ba596d0bf09594f52c4799e11c88ec627d3fbf3937e5544f9a8ad0a
+size 50332813
llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/12.post_attention_layernorm.weight/exp_avg.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:eb17bcd7187a49f6e4ee6e2a94020b813f40a6221de2ea45deecd24acf8a681d
+size 9372
llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/12.post_attention_layernorm.weight/exp_avg_sq.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6619746da8d35ec9dbfd950e5a424074fde7055929c829e5590b192c496aa138
+size 9387
llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/12.post_attention_layernorm.weight/fp32.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:757f97f8e3bbdc174252a0e622a19b55c65755d3d6bcd5503aeaa6da364d60a1
+size 9293
llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/13.attention.dense.weight/exp_avg.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:87c712fad0ab1a5522395df86dfe105e520fdb5cd02d2dfc8d081df17e9dbf6c
+size 16778460
llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/13.attention.dense.weight/exp_avg_sq.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8dc3a8e08c174b0a59d6f53356c858b527765fbe0452441b01b1d0fa2ef0910e
+size 16778475
llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/13.attention.dense.weight/fp32.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c94752c4e81ee6f0beb22760d091ce06076b26262778d42595a9c47fa2dadd6d
+size 16778381
llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/13.post_attention_layernorm.weight/exp_avg.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ae0a702d5ff3d8444debba9e5a2c1fb2f0611b1790bcdcc7fe4bfadf9a0887f5
+size 9372
llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/13.post_attention_layernorm.weight/exp_avg_sq.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fea5d337e304b476187d0febd0f680d4b93f2b0c111f7762cb7165df5267e087
+size 9387
llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/13.post_attention_layernorm.weight/fp32.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c6bdccf353dca2bd799af13d644ea59532c1ed9955732848ad1c1a5f2fe7a67a
+size 9293
llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/14.attention.query_key_value.weight/exp_avg.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fcbfe8730aee44e26dcea1ac28d39c883f84b0bde72ebce2ac18257791308577
+size 50332892
llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/14.attention.query_key_value.weight/exp_avg_sq.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5859ca8d3e90867a6675d8dfb8d5bba4579f4b2bc4d9a4780d9311178ef3c2fb
+size 50332907
llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/14.attention.query_key_value.weight/fp32.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d4ccc3575fb53ae3c133418e8f3117d31907d3d3291b281882da4fcb4cc2abf8
+size 50332813
llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/15.attention.dense.weight/exp_avg.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:18bd632a73c7eceb51862b446fd11016099f6daa60b6ed0440752eea61d166f1
+size 16778460
llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/15.attention.dense.weight/exp_avg_sq.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f973260528cda9b0a6248c2f8fdcf8fabe027b4a9fed67e11eefe7a3007acab2
+size 16778475
llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/15.attention.dense.weight/fp32.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0c59eded37fca83551eb76f42ada551519dc4ddbce09154abce0e3bcd3457700
+size 16778381
llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/15.mlp.dense_h_to_4h.weight/exp_avg.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ebb6a85364632a58c5d6a6af98f925d7145f441176870eac8567c1232c90e728
+size 33555676
llama13b_600M/26-04-2024-09:36:12/universal/global_step5500/zero/15.mlp.dense_h_to_4h.weight/exp_avg_sq.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:17cf90e7524f9dda2314d0ac559f87aaa7e694c10ad4f700775d5c8aac7bd231
+size 33555691