This view is limited to 50 files because the diff contains too many changes; see the raw diff for the full change set.
- emb-norm/000-module.3.input_layernorm/events.out.tfevents.1638234920.r6i3n2.1319219.5 +3 -0
- emb-norm/000-module.3.mlp.dense_4h_to_h/events.out.tfevents.1638234921.r6i3n2.1319219.13 +3 -0
- emb-norm/000-module.3.mlp.dense_h_to_4h/events.out.tfevents.1638234921.r6i3n2.1319219.12 +3 -0
- emb-norm/000-module.3.mlp/events.out.tfevents.1638234921.r6i3n2.1319219.14 +3 -0
- emb-norm/000-module.3.post_attention_layernorm/events.out.tfevents.1638234921.r6i3n2.1319219.11 +3 -0
- emb-norm/000-module.3.self_attention.attention_dropout/events.out.tfevents.1638234920.r6i3n2.1319219.8 +3 -0
- emb-norm/000-module.3.self_attention.dense/events.out.tfevents.1638234920.r6i3n2.1319219.9 +3 -0
- emb-norm/000-module.3.self_attention.query_key_value/events.out.tfevents.1638234920.r6i3n2.1319219.6 +3 -0
- emb-norm/000-module.3.self_attention.scale_mask_softmax/events.out.tfevents.1638234920.r6i3n2.1319219.7 +3 -0
- emb-norm/000-module.3.self_attention/events.out.tfevents.1638234920.r6i3n2.1319219.10 +3 -0
- emb-norm/000-module.3/events.out.tfevents.1638234921.r6i3n2.1319219.15 +3 -0
- emb-norm/000-module.4.input_layernorm/events.out.tfevents.1638234921.r6i3n2.1319219.16 +3 -0
- emb-norm/000-module.4.mlp.dense_4h_to_h/events.out.tfevents.1638234921.r6i3n2.1319219.24 +3 -0
- emb-norm/000-module.4.mlp.dense_h_to_4h/events.out.tfevents.1638234921.r6i3n2.1319219.23 +3 -0
- emb-norm/000-module.4.mlp/events.out.tfevents.1638234921.r6i3n2.1319219.25 +3 -0
- emb-norm/000-module.4.post_attention_layernorm/events.out.tfevents.1638234921.r6i3n2.1319219.22 +3 -0
- emb-norm/000-module.4.self_attention.attention_dropout/events.out.tfevents.1638234921.r6i3n2.1319219.19 +3 -0
- emb-norm/000-module.4.self_attention.dense/events.out.tfevents.1638234921.r6i3n2.1319219.20 +3 -0
- emb-norm/000-module.4.self_attention.query_key_value/events.out.tfevents.1638234921.r6i3n2.1319219.17 +3 -0
- emb-norm/000-module.4.self_attention.scale_mask_softmax/events.out.tfevents.1638234921.r6i3n2.1319219.18 +3 -0
- emb-norm/000-module.4.self_attention/events.out.tfevents.1638234921.r6i3n2.1319219.21 +3 -0
- emb-norm/000-module.4/events.out.tfevents.1638234921.r6i3n2.1319219.26 +3 -0
- emb-norm/000-module.tied_modules.embed.embedding_dropout/events.out.tfevents.1638234920.r6i3n2.1319219.3 +3 -0
- emb-norm/000-module.tied_modules.embed.position_embeddings/events.out.tfevents.1638234920.r6i3n2.1319219.2 +3 -0
- emb-norm/000-module.tied_modules.embed.word_embeddings.norm/events.out.tfevents.1638234920.r6i3n2.1319219.0 +3 -0
- emb-norm/000-module.tied_modules.embed.word_embeddings/events.out.tfevents.1638234920.r6i3n2.1319219.1 +3 -0
- emb-norm/000-module.tied_modules.embed/events.out.tfevents.1638234920.r6i3n2.1319219.4 +3 -0
- emb-norm/000-module/events.out.tfevents.1638234921.r6i3n2.1319219.27 +3 -0
- emb-norm/056-module.17.input_layernorm/events.out.tfevents.1638234932.r6i5n3.61809.0 +3 -0
- emb-norm/056-module.17.mlp.dense_4h_to_h/events.out.tfevents.1638234933.r6i5n3.61809.8 +3 -0
- emb-norm/056-module.17.mlp.dense_h_to_4h/events.out.tfevents.1638234933.r6i5n3.61809.7 +3 -0
- emb-norm/056-module.17.mlp/events.out.tfevents.1638234933.r6i5n3.61809.9 +3 -0
- emb-norm/056-module.17.post_attention_layernorm/events.out.tfevents.1638234933.r6i5n3.61809.6 +3 -0
- emb-norm/056-module.17.self_attention.attention_dropout/events.out.tfevents.1638234932.r6i5n3.61809.3 +3 -0
- emb-norm/056-module.17.self_attention.dense/events.out.tfevents.1638234932.r6i5n3.61809.4 +3 -0
- emb-norm/056-module.17.self_attention.query_key_value/events.out.tfevents.1638234932.r6i5n3.61809.1 +3 -0
- emb-norm/056-module.17.self_attention.scale_mask_softmax/events.out.tfevents.1638234932.r6i5n3.61809.2 +3 -0
- emb-norm/056-module.17.self_attention/events.out.tfevents.1638234932.r6i5n3.61809.5 +3 -0
- emb-norm/056-module.17/events.out.tfevents.1638234933.r6i5n3.61809.10 +3 -0
- emb-norm/056-module.18.input_layernorm/events.out.tfevents.1638234933.r6i5n3.61809.11 +3 -0
- emb-norm/056-module.18.mlp.dense_4h_to_h/events.out.tfevents.1638234933.r6i5n3.61809.19 +3 -0
- emb-norm/056-module.18.mlp.dense_h_to_4h/events.out.tfevents.1638234933.r6i5n3.61809.18 +3 -0
- emb-norm/056-module.18.mlp/events.out.tfevents.1638234933.r6i5n3.61809.20 +3 -0
- emb-norm/056-module.18.post_attention_layernorm/events.out.tfevents.1638234933.r6i5n3.61809.17 +3 -0
- emb-norm/056-module.18.self_attention.attention_dropout/events.out.tfevents.1638234933.r6i5n3.61809.14 +3 -0
- emb-norm/056-module.18.self_attention.dense/events.out.tfevents.1638234933.r6i5n3.61809.15 +3 -0
- emb-norm/056-module.18.self_attention.query_key_value/events.out.tfevents.1638234933.r6i5n3.61809.12 +3 -0
- emb-norm/056-module.18.self_attention.scale_mask_softmax/events.out.tfevents.1638234933.r6i5n3.61809.13 +3 -0
- emb-norm/056-module.18.self_attention/events.out.tfevents.1638234933.r6i5n3.61809.16 +3 -0
- emb-norm/056-module.18/events.out.tfevents.1638234933.r6i5n3.61809.21 +3 -0
emb-norm/000-module.3.input_layernorm/events.out.tfevents.1638234920.r6i3n2.1319219.5
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dd7a575b29528dca919c78d862b1a5d5c7b39c94ccbb4b077eaf785fb7edb6b7
+size 95460328

emb-norm/000-module.3.mlp.dense_4h_to_h/events.out.tfevents.1638234921.r6i3n2.1319219.13
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7018cc2f5fa10ee0cee75b4b064a584f21ca1261fb74ca8a6888e37c4a8cfd30
+size 96361960

emb-norm/000-module.3.mlp.dense_h_to_4h/events.out.tfevents.1638234921.r6i3n2.1319219.12
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ed7348bbb83d6da157f241768a08c26c93670ba678f64d1c64dadd5239a7b30a
+size 96361960

emb-norm/000-module.3.mlp/events.out.tfevents.1638234921.r6i3n2.1319219.14
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f2ed5c9cd57ba5a69ec53531cf6e2817fb690d8f384dbd5f219bfd6556d861d4
+size 56464744

emb-norm/000-module.3.post_attention_layernorm/events.out.tfevents.1638234921.r6i3n2.1319219.11
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b225cd8c8325a8e6a37f480f948ac4f7c42a9c57e763a333eb7cb039bd4ce50a
+size 95460328

emb-norm/000-module.3.self_attention.attention_dropout/events.out.tfevents.1638234920.r6i3n2.1319219.8
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d5144848dc3744ff8ca2514dfde50a6b141f678f43e3763c069e67ed49d0e158
+size 41587816

emb-norm/000-module.3.self_attention.dense/events.out.tfevents.1638234920.r6i3n2.1319219.9
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dbc3c2cc4dcbab3e433809ce606340a93fdbd8711c6eea34e96ff6957816cc7e
+size 96361960

emb-norm/000-module.3.self_attention.query_key_value/events.out.tfevents.1638234920.r6i3n2.1319219.6
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bc51af686450d29867044de81957983fbe081cfddd5d628b4b267fd342be118b
+size 89148904

emb-norm/000-module.3.self_attention.scale_mask_softmax/events.out.tfevents.1638234920.r6i3n2.1319219.7
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3b9aad70056e54c203eee0de54d06a2330bbbf3650bb51982788b01247b8cadb
+size 41587816

emb-norm/000-module.3.self_attention/events.out.tfevents.1638234920.r6i3n2.1319219.10
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9cafd612f62d1878b220490251ce43093b260b33e99822e8eabba4c3f30bca6b
+size 56464744

emb-norm/000-module.3/events.out.tfevents.1638234921.r6i3n2.1319219.15
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:73dc9792eca51cf5d6b191e5e4e99c67dc87a00287d4ce2b9190210996e10e96
+size 55563112

emb-norm/000-module.4.input_layernorm/events.out.tfevents.1638234921.r6i3n2.1319219.16
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7968345ffe83e004ac0ddcbe80bc53085c83d3c69ec82976fddf4dac8684f584
+size 95460328

emb-norm/000-module.4.mlp.dense_4h_to_h/events.out.tfevents.1638234921.r6i3n2.1319219.24
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0a4ffc7486acfa92f8b95bb67a5051ffa6f2cc45699eb2afc7e1881723a6c882
+size 96361960

emb-norm/000-module.4.mlp.dense_h_to_4h/events.out.tfevents.1638234921.r6i3n2.1319219.23
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c052e191f0eb1a7bef0b57375cacdce5f4fcb2d4db03a063bed874a2a8b1a263
+size 96361960

emb-norm/000-module.4.mlp/events.out.tfevents.1638234921.r6i3n2.1319219.25
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:58bb52ca4728e7f38fdd73f741a6b3fe3a36d1746d5f2aedf726a52a5c80e92c
+size 56464744

emb-norm/000-module.4.post_attention_layernorm/events.out.tfevents.1638234921.r6i3n2.1319219.22
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1363d696e9e4fe292481af1131a007898ee8268d9c8fd2d6956a8cfb4448a68a
+size 95460328

emb-norm/000-module.4.self_attention.attention_dropout/events.out.tfevents.1638234921.r6i3n2.1319219.19
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:af12749f89c86a0e61737c089edc77c7beedbaabd2708ce6cc8a348119c9042f
+size 41587816

emb-norm/000-module.4.self_attention.dense/events.out.tfevents.1638234921.r6i3n2.1319219.20
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:453721d7fcc30a21e9c30b8716ad79384743533d0182b98009119fd30544b512
+size 96361960

emb-norm/000-module.4.self_attention.query_key_value/events.out.tfevents.1638234921.r6i3n2.1319219.17
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:31f0e78af26f8fe1459a45fb76f516c283aa6df15c12055f8acc3c4d52768938
+size 89148904

emb-norm/000-module.4.self_attention.scale_mask_softmax/events.out.tfevents.1638234921.r6i3n2.1319219.18
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:19abb5f49a400e57b85ff2f106300323606834d87d4ae45be71368fb6276fdc1
+size 41587816

emb-norm/000-module.4.self_attention/events.out.tfevents.1638234921.r6i3n2.1319219.21
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ce07e824b9399e4452e9d78953544ea7828c92e2f39928edd967f06b21cb1d87
+size 56464744

emb-norm/000-module.4/events.out.tfevents.1638234921.r6i3n2.1319219.26
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:aff29ec3c59d3cac3d49073301526d5812a3ff8a8a3b48d5009b5f1c5c853c1d
+size 55563112

emb-norm/000-module.tied_modules.embed.embedding_dropout/events.out.tfevents.1638234920.r6i3n2.1319219.3
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cbef8da9d84e68c2b4dd7f9dac3db4ef63411968fe529a2c54d564392ef4f3f1
+size 27837928

emb-norm/000-module.tied_modules.embed.position_embeddings/events.out.tfevents.1638234920.r6i3n2.1319219.2
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:febdde68961be30187fdf28563cb3b71799d8020cb80014556a2b1f519f6e8be
+size 34374760

emb-norm/000-module.tied_modules.embed.word_embeddings.norm/events.out.tfevents.1638234920.r6i3n2.1319219.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:97d86cbda6271c419aad3b9ace19b0f05c5290dae0d4b85222259e1ae4b65db7
+size 68411368

emb-norm/000-module.tied_modules.embed.word_embeddings/events.out.tfevents.1638234920.r6i3n2.1319219.1
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e601206a038661448edb9e34bd19af0b50de35fe3c43e20234e98fabae2da7d1
+size 48350056

emb-norm/000-module.tied_modules.embed/events.out.tfevents.1638234920.r6i3n2.1319219.4
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fe403bb83b2bb36eaa3b81f5761ae9748bca7bedaeef717ef836dc352c9102cc
+size 20850280

emb-norm/000-module/events.out.tfevents.1638234921.r6i3n2.1319219.27
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:989c1d244b0b3f9101a9a7c709db7827176157321c6551b19501be78a9d19ccd
+size 20850280

emb-norm/056-module.17.input_layernorm/events.out.tfevents.1638234932.r6i5n3.61809.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1d614435f7273e7ef057cbaeb76cac650e941a84b1fb8da30dd6edb415f5b562
+size 95460328

emb-norm/056-module.17.mlp.dense_4h_to_h/events.out.tfevents.1638234933.r6i5n3.61809.8
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8b86123ccf4bac082cc5e27315f03acbfc6a023d2364da1f423672300ff3c191
+size 96361960

emb-norm/056-module.17.mlp.dense_h_to_4h/events.out.tfevents.1638234933.r6i5n3.61809.7
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:290b91bf33e7a60223286ad393bdd19f43902c7d9b358ebb7eefa0cc1eadab1e
+size 96361960

emb-norm/056-module.17.mlp/events.out.tfevents.1638234933.r6i5n3.61809.9
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b2623e2fc0600390d1d42531996df170bd97ae5f88c8fa9c5ba5bd41ccfa4513
+size 56464744

emb-norm/056-module.17.post_attention_layernorm/events.out.tfevents.1638234933.r6i5n3.61809.6
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8f45244bc5b09e872ee552131f55b658d11735ce2a708793e9ed563a56526420
+size 95460328

emb-norm/056-module.17.self_attention.attention_dropout/events.out.tfevents.1638234932.r6i5n3.61809.3
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f071bef544c1f3fba8bb35f49160fb7d538462e8347bd6b43ad726ee456e8a76
+size 41587816

emb-norm/056-module.17.self_attention.dense/events.out.tfevents.1638234932.r6i5n3.61809.4
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:11c18e9424b187ab698bbdbefa970936d2edff0e441af25d7dd59177595b6ce2
+size 96361960

emb-norm/056-module.17.self_attention.query_key_value/events.out.tfevents.1638234932.r6i5n3.61809.1
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9a0fe59bb381d982fbef86b8cb77890315c9bcaa9f33adb58cc19e7fb5e345ed
+size 89148904

emb-norm/056-module.17.self_attention.scale_mask_softmax/events.out.tfevents.1638234932.r6i5n3.61809.2
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cf19590c207bf0c4d85345f4705062c8097411bd5e5838416efaad7ebae502f6
+size 41587816

emb-norm/056-module.17.self_attention/events.out.tfevents.1638234932.r6i5n3.61809.5
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fcad6e92831246a2d8ce0065db57079dbca4f79c9c07b72960cf4c6448faac50
+size 56464744

emb-norm/056-module.17/events.out.tfevents.1638234933.r6i5n3.61809.10
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d43498857bf975f37693f2bcc2bc19c0967c18069cccf68cc9378bac6251df42
+size 55563112

emb-norm/056-module.18.input_layernorm/events.out.tfevents.1638234933.r6i5n3.61809.11
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4ecb8cd1263850cac5ea7ea08d918f2de051d808a094e6c33c438ea72ddb8cdd
+size 95460328

emb-norm/056-module.18.mlp.dense_4h_to_h/events.out.tfevents.1638234933.r6i5n3.61809.19
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2c859a91931bd04811aa8b2cc451ab5985786a5ecc80562f967634c6c3f29ae6
+size 96361960

emb-norm/056-module.18.mlp.dense_h_to_4h/events.out.tfevents.1638234933.r6i5n3.61809.18
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7824e9680ad7f38d93152d89669e4946d3be84a49479ead243e954849ee4bada
+size 96361960

emb-norm/056-module.18.mlp/events.out.tfevents.1638234933.r6i5n3.61809.20
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d2c0a10c215b17aeefa2ee1477b59b3992cdfe13e551cf2af983863a5f8b66a9
+size 56464744

emb-norm/056-module.18.post_attention_layernorm/events.out.tfevents.1638234933.r6i5n3.61809.17
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5f84e9aaaa7ff7b61b0ee49e07f5a55f39e0783b5e977c7ef78b8b2f0ca1b7d0
+size 95460328

emb-norm/056-module.18.self_attention.attention_dropout/events.out.tfevents.1638234933.r6i5n3.61809.14
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bc5d5527bb376dec972a1b672c3b0d1a37c58fdd315392ffd05ce05b36fb8ada
+size 41587816

emb-norm/056-module.18.self_attention.dense/events.out.tfevents.1638234933.r6i5n3.61809.15
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:67e4a9f7707efeb07ba34708fefb226f22194b7af72f5d0ec9cea9729e4dc9a2
+size 96361960

emb-norm/056-module.18.self_attention.query_key_value/events.out.tfevents.1638234933.r6i5n3.61809.12
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:12aebe2e8c5b0eaa669db8702a405820987465cf11dbdec15ab446be4f60a1f2
+size 89148904

emb-norm/056-module.18.self_attention.scale_mask_softmax/events.out.tfevents.1638234933.r6i5n3.61809.13
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2d27874c5c171fd9b428daff83b8b61b31e9f41c7b6b32869cbde16134dea32e
+size 41587816

emb-norm/056-module.18.self_attention/events.out.tfevents.1638234933.r6i5n3.61809.16
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a9c0e468161870586dfcf3cfae07e2b901e631f662359c51e727d424f08955e7
+size 56464744

emb-norm/056-module.18/events.out.tfevents.1638234933.r6i5n3.61809.21
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9d4b87b8d976485ce1c8deb08ab4f033df0c2be95597604c82328bc08e517a8a
+size 55563112
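Every file added in this commit is tracked with Git LFS, so the diff shows only the three-line pointer (spec version, SHA-256 oid, byte size) rather than the TensorBoard event data itself. As a minimal sketch of how the underlying logs could be pulled and inspected (assuming a local clone of the repository with git-lfs installed and the standard `tensorboard` package; the chosen path is simply one of the files listed above):

```python
# Minimal sketch, not part of this commit: fetch the LFS-tracked TensorBoard
# event files under emb-norm/ and list the scalar tags recorded for one layer.
import subprocess

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Replace the LFS pointer files under emb-norm/ with the real event data.
subprocess.run(["git", "lfs", "pull", "--include", "emb-norm/**"], check=True)

# One of the files added in this commit (module.3 input_layernorm on rank 000).
path = (
    "emb-norm/000-module.3.input_layernorm/"
    "events.out.tfevents.1638234920.r6i3n2.1319219.5"
)

acc = EventAccumulator(path)
acc.Reload()

# Print the scalar tags logged for this module in the emb-norm run.
print(acc.Tags()["scalars"])
```

The same pattern applies to any of the other emb-norm/* event files; pointing `tensorboard --logdir emb-norm/` at the directory after `git lfs pull` would load them all at once.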