{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 48.0,
  "global_step": 6000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.08,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 3.6493,
      "step": 10
    },
    {
      "epoch": 0.16,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 3.6538,
      "step": 20
    },
    {
      "epoch": 0.24,
      "learning_rate": 3e-06,
      "loss": 3.6342,
      "step": 30
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.000000000000001e-06,
      "loss": 3.5774,
      "step": 40
    },
    {
      "epoch": 0.4,
      "learning_rate": 5e-06,
      "loss": 3.5288,
      "step": 50
    },
    {
      "epoch": 0.48,
      "learning_rate": 6e-06,
      "loss": 3.452,
      "step": 60
    },
    {
      "epoch": 0.56,
      "learning_rate": 7.000000000000001e-06,
      "loss": 3.3685,
      "step": 70
    },
    {
      "epoch": 0.64,
      "learning_rate": 8.000000000000001e-06,
      "loss": 3.2667,
      "step": 80
    },
    {
      "epoch": 0.72,
      "learning_rate": 9e-06,
      "loss": 3.2796,
      "step": 90
    },
    {
      "epoch": 0.8,
      "learning_rate": 1e-05,
      "loss": 3.1435,
      "step": 100
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.1000000000000001e-05,
      "loss": 3.1206,
      "step": 110
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.2e-05,
      "loss": 3.0387,
      "step": 120
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.3000000000000001e-05,
      "loss": 2.9759,
      "step": 130
    },
    {
      "epoch": 1.12,
      "learning_rate": 1.4000000000000001e-05,
      "loss": 3.0132,
      "step": 140
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.5e-05,
      "loss": 2.9027,
      "step": 150
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 2.815,
      "step": 160
    },
    {
      "epoch": 1.36,
      "learning_rate": 1.7000000000000003e-05,
      "loss": 2.8081,
      "step": 170
    },
    {
      "epoch": 1.44,
      "learning_rate": 1.8e-05,
      "loss": 2.8926,
      "step": 180
    },
    {
      "epoch": 1.52,
      "learning_rate": 1.9e-05,
      "loss": 2.8096,
      "step": 190
    },
    {
      "epoch": 1.6,
      "learning_rate": 2e-05,
      "loss": 2.7153,
      "step": 200
    },
    {
      "epoch": 1.68,
      "learning_rate": 2.1e-05,
      "loss": 2.7193,
      "step": 210
    },
    {
      "epoch": 1.76,
      "learning_rate": 2.2000000000000003e-05,
      "loss": 2.5646,
      "step": 220
    },
    {
      "epoch": 1.84,
      "learning_rate": 2.3000000000000003e-05,
      "loss": 2.7471,
      "step": 230
    },
    {
      "epoch": 1.92,
      "learning_rate": 2.4e-05,
      "loss": 2.7368,
      "step": 240
    },
    {
      "epoch": 2.0,
      "learning_rate": 2.5e-05,
      "loss": 2.6167,
      "step": 250
    },
    {
      "epoch": 2.08,
      "learning_rate": 2.6000000000000002e-05,
      "loss": 2.492,
      "step": 260
    },
    {
      "epoch": 2.16,
      "learning_rate": 2.7000000000000002e-05,
      "loss": 2.5191,
      "step": 270
    },
    {
      "epoch": 2.24,
      "learning_rate": 2.8000000000000003e-05,
      "loss": 2.5223,
      "step": 280
    },
    {
      "epoch": 2.32,
      "learning_rate": 2.9e-05,
      "loss": 2.4294,
      "step": 290
    },
    {
      "epoch": 2.4,
      "learning_rate": 3e-05,
      "loss": 2.3719,
      "step": 300
    },
    {
      "epoch": 2.48,
      "learning_rate": 3.1e-05,
      "loss": 2.4148,
      "step": 310
    },
    {
      "epoch": 2.56,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 2.4787,
      "step": 320
    },
    {
      "epoch": 2.64,
      "learning_rate": 3.3e-05,
      "loss": 2.4021,
      "step": 330
    },
    {
      "epoch": 2.72,
      "learning_rate": 3.4000000000000007e-05,
      "loss": 2.3982,
      "step": 340
    },
    {
      "epoch": 2.8,
      "learning_rate": 3.5e-05,
      "loss": 2.28,
      "step": 350
    },
    {
      "epoch": 2.88,
      "learning_rate": 3.6e-05,
      "loss": 2.4302,
      "step": 360
    },
    {
      "epoch": 2.96,
      "learning_rate": 3.7e-05,
      "loss": 2.2597,
      "step": 370
    },
    {
      "epoch": 3.04,
      "learning_rate": 3.8e-05,
      "loss": 2.0781,
      "step": 380
    },
    {
      "epoch": 3.12,
      "learning_rate": 3.9000000000000006e-05,
      "loss": 2.0256,
      "step": 390
    },
    {
      "epoch": 3.2,
      "learning_rate": 4e-05,
      "loss": 2.1566,
      "step": 400
    },
    {
      "epoch": 3.28,
      "learning_rate": 4.1e-05,
      "loss": 2.0487,
      "step": 410
    },
    {
      "epoch": 3.36,
      "learning_rate": 4.2e-05,
      "loss": 2.0661,
      "step": 420
    },
    {
      "epoch": 3.44,
      "learning_rate": 4.3e-05,
      "loss": 2.0816,
      "step": 430
    },
    {
      "epoch": 3.52,
      "learning_rate": 4.4000000000000006e-05,
      "loss": 2.1713,
      "step": 440
    },
    {
      "epoch": 3.6,
      "learning_rate": 4.5e-05,
      "loss": 2.151,
      "step": 450
    },
    {
      "epoch": 3.68,
      "learning_rate": 4.600000000000001e-05,
      "loss": 1.9657,
      "step": 460
    },
    {
      "epoch": 3.76,
      "learning_rate": 4.7e-05,
      "loss": 2.0014,
      "step": 470
    },
    {
      "epoch": 3.84,
      "learning_rate": 4.8e-05,
      "loss": 1.9713,
      "step": 480
    },
    {
      "epoch": 3.92,
      "learning_rate": 4.9e-05,
      "loss": 1.936,
      "step": 490
    },
    {
      "epoch": 4.0,
      "learning_rate": 5e-05,
      "loss": 1.9813,
      "step": 500
    },
    {
      "epoch": 4.08,
      "learning_rate": 4.9913043478260876e-05,
      "loss": 1.7488,
      "step": 510
    },
    {
      "epoch": 4.16,
      "learning_rate": 4.9826086956521736e-05,
      "loss": 1.7985,
      "step": 520
    },
    {
      "epoch": 4.24,
      "learning_rate": 4.973913043478261e-05,
      "loss": 1.7398,
      "step": 530
    },
    {
      "epoch": 4.32,
      "learning_rate": 4.9652173913043483e-05,
      "loss": 1.5946,
      "step": 540
    },
    {
      "epoch": 4.4,
      "learning_rate": 4.956521739130435e-05,
      "loss": 1.879,
      "step": 550
    },
    {
      "epoch": 4.48,
      "learning_rate": 4.947826086956522e-05,
      "loss": 1.8759,
      "step": 560
    },
    {
      "epoch": 4.56,
      "learning_rate": 4.939130434782609e-05,
      "loss": 1.7932,
      "step": 570
    },
    {
      "epoch": 4.64,
      "learning_rate": 4.930434782608696e-05,
      "loss": 1.7335,
      "step": 580
    },
    {
      "epoch": 4.72,
      "learning_rate": 4.9217391304347824e-05,
      "loss": 1.7787,
      "step": 590
    },
    {
      "epoch": 4.8,
      "learning_rate": 4.91304347826087e-05,
      "loss": 1.8258,
      "step": 600
    },
    {
      "epoch": 4.88,
      "learning_rate": 4.904347826086957e-05,
      "loss": 1.7937,
      "step": 610
    },
    {
      "epoch": 4.96,
      "learning_rate": 4.895652173913044e-05,
      "loss": 1.734,
      "step": 620
    },
    {
      "epoch": 5.04,
      "learning_rate": 4.8869565217391305e-05,
      "loss": 1.5941,
      "step": 630
    },
    {
      "epoch": 5.12,
      "learning_rate": 4.878260869565218e-05,
      "loss": 1.6789,
      "step": 640
    },
    {
      "epoch": 5.2,
      "learning_rate": 4.8695652173913046e-05,
      "loss": 1.4186,
      "step": 650
    },
    {
      "epoch": 5.28,
      "learning_rate": 4.860869565217391e-05,
      "loss": 1.5457,
      "step": 660
    },
    {
      "epoch": 5.36,
      "learning_rate": 4.8521739130434786e-05,
      "loss": 1.5282,
      "step": 670
    },
    {
      "epoch": 5.44,
      "learning_rate": 4.843478260869565e-05,
      "loss": 1.5594,
      "step": 680
    },
    {
      "epoch": 5.52,
      "learning_rate": 4.834782608695652e-05,
      "loss": 1.7068,
      "step": 690
    },
    {
      "epoch": 5.6,
      "learning_rate": 4.8260869565217394e-05,
      "loss": 1.5521,
      "step": 700
    },
    {
      "epoch": 5.68,
      "learning_rate": 4.817391304347826e-05,
      "loss": 1.5973,
      "step": 710
    },
    {
      "epoch": 5.76,
      "learning_rate": 4.8086956521739134e-05,
      "loss": 1.5842,
      "step": 720
    },
    {
      "epoch": 5.84,
      "learning_rate": 4.8e-05,
      "loss": 1.5819,
      "step": 730
    },
    {
      "epoch": 5.92,
      "learning_rate": 4.7913043478260875e-05,
      "loss": 1.6313,
      "step": 740
    },
    {
      "epoch": 6.0,
      "learning_rate": 4.782608695652174e-05,
      "loss": 1.5262,
      "step": 750
    },
    {
      "epoch": 6.08,
      "learning_rate": 4.773913043478261e-05,
      "loss": 1.3722,
      "step": 760
    },
    {
      "epoch": 6.16,
      "learning_rate": 4.765217391304348e-05,
      "loss": 1.2905,
      "step": 770
    },
    {
      "epoch": 6.24,
      "learning_rate": 4.756521739130435e-05,
      "loss": 1.4479,
      "step": 780
    },
    {
      "epoch": 6.32,
      "learning_rate": 4.747826086956522e-05,
      "loss": 1.424,
      "step": 790
    },
    {
      "epoch": 6.4,
      "learning_rate": 4.739130434782609e-05,
      "loss": 1.3006,
      "step": 800
    },
    {
      "epoch": 6.48,
      "learning_rate": 4.7304347826086956e-05,
      "loss": 1.4776,
      "step": 810
    },
    {
      "epoch": 6.56,
      "learning_rate": 4.721739130434783e-05,
      "loss": 1.4928,
      "step": 820
    },
    {
      "epoch": 6.64,
      "learning_rate": 4.71304347826087e-05,
      "loss": 1.4294,
      "step": 830
    },
    {
      "epoch": 6.72,
      "learning_rate": 4.7043478260869564e-05,
      "loss": 1.386,
      "step": 840
    },
    {
      "epoch": 6.8,
      "learning_rate": 4.695652173913044e-05,
      "loss": 1.4618,
      "step": 850
    },
    {
      "epoch": 6.88,
      "learning_rate": 4.686956521739131e-05,
      "loss": 1.4186,
      "step": 860
    },
    {
      "epoch": 6.96,
      "learning_rate": 4.678260869565218e-05,
      "loss": 1.6092,
      "step": 870
    },
    {
      "epoch": 7.04,
      "learning_rate": 4.6695652173913045e-05,
      "loss": 1.3375,
      "step": 880
    },
    {
      "epoch": 7.12,
      "learning_rate": 4.660869565217392e-05,
      "loss": 1.2338,
      "step": 890
    },
    {
      "epoch": 7.2,
      "learning_rate": 4.6521739130434785e-05,
      "loss": 1.3258,
      "step": 900
    },
    {
      "epoch": 7.28,
      "learning_rate": 4.643478260869565e-05,
      "loss": 1.2257,
      "step": 910
    },
    {
      "epoch": 7.36,
      "learning_rate": 4.6347826086956526e-05,
      "loss": 1.2538,
      "step": 920
    },
    {
      "epoch": 7.44,
      "learning_rate": 4.62608695652174e-05,
      "loss": 1.3203,
      "step": 930
    },
    {
      "epoch": 7.52,
      "learning_rate": 4.617391304347826e-05,
      "loss": 1.2229,
      "step": 940
    },
    {
      "epoch": 7.6,
      "learning_rate": 4.608695652173913e-05,
      "loss": 1.31,
      "step": 950
    },
    {
      "epoch": 7.68,
      "learning_rate": 4.600000000000001e-05,
      "loss": 1.3021,
      "step": 960
    },
    {
      "epoch": 7.76,
      "learning_rate": 4.591304347826087e-05,
      "loss": 1.3159,
      "step": 970
    },
    {
      "epoch": 7.84,
      "learning_rate": 4.582608695652174e-05,
      "loss": 1.3946,
      "step": 980
    },
    {
      "epoch": 7.92,
      "learning_rate": 4.5739130434782614e-05,
      "loss": 1.4731,
      "step": 990
    },
    {
      "epoch": 8.0,
      "learning_rate": 4.565217391304348e-05,
      "loss": 1.4642,
      "step": 1000
    },
    {
      "epoch": 8.08,
      "learning_rate": 4.556521739130435e-05,
      "loss": 1.173,
      "step": 1010
    },
    {
      "epoch": 8.16,
      "learning_rate": 4.547826086956522e-05,
      "loss": 1.1411,
      "step": 1020
    },
    {
      "epoch": 8.24,
      "learning_rate": 4.539130434782609e-05,
      "loss": 1.1576,
      "step": 1030
    },
    {
      "epoch": 8.32,
      "learning_rate": 4.5304347826086955e-05,
      "loss": 1.1805,
      "step": 1040
    },
    {
      "epoch": 8.4,
      "learning_rate": 4.521739130434783e-05,
      "loss": 1.1661,
      "step": 1050
    },
    {
      "epoch": 8.48,
      "learning_rate": 4.51304347826087e-05,
      "loss": 1.1878,
      "step": 1060
    },
    {
      "epoch": 8.56,
      "learning_rate": 4.504347826086956e-05,
      "loss": 1.2674,
      "step": 1070
    },
    {
      "epoch": 8.64,
      "learning_rate": 4.4956521739130436e-05,
      "loss": 1.1968,
      "step": 1080
    },
    {
      "epoch": 8.72,
      "learning_rate": 4.486956521739131e-05,
      "loss": 1.3326,
      "step": 1090
    },
    {
      "epoch": 8.8,
      "learning_rate": 4.478260869565218e-05,
      "loss": 1.1905,
      "step": 1100
    },
    {
      "epoch": 8.88,
      "learning_rate": 4.4695652173913044e-05,
      "loss": 1.3127,
      "step": 1110
    },
    {
      "epoch": 8.96,
      "learning_rate": 4.460869565217392e-05,
      "loss": 1.3091,
      "step": 1120
    },
    {
      "epoch": 9.04,
      "learning_rate": 4.4521739130434784e-05,
      "loss": 1.1955,
      "step": 1130
    },
    {
      "epoch": 9.12,
      "learning_rate": 4.443478260869565e-05,
      "loss": 1.0431,
      "step": 1140
    },
    {
      "epoch": 9.2,
      "learning_rate": 4.4347826086956525e-05,
      "loss": 1.0506,
      "step": 1150
    },
    {
      "epoch": 9.28,
      "learning_rate": 4.426086956521739e-05,
      "loss": 1.0924,
      "step": 1160
    },
    {
      "epoch": 9.36,
      "learning_rate": 4.4173913043478265e-05,
      "loss": 1.1698,
      "step": 1170
    },
    {
      "epoch": 9.44,
      "learning_rate": 4.408695652173913e-05,
      "loss": 1.2168,
      "step": 1180
    },
    {
      "epoch": 9.52,
      "learning_rate": 4.4000000000000006e-05,
      "loss": 1.1931,
      "step": 1190
    },
    {
      "epoch": 9.6,
      "learning_rate": 4.391304347826087e-05,
      "loss": 1.2466,
      "step": 1200
    },
    {
      "epoch": 9.68,
      "learning_rate": 4.382608695652174e-05,
      "loss": 1.1826,
      "step": 1210
    },
    {
      "epoch": 9.76,
      "learning_rate": 4.373913043478261e-05,
      "loss": 1.1994,
      "step": 1220
    },
    {
      "epoch": 9.84,
      "learning_rate": 4.365217391304348e-05,
      "loss": 1.116,
      "step": 1230
    },
    {
      "epoch": 9.92,
      "learning_rate": 4.3565217391304353e-05,
      "loss": 1.1418,
      "step": 1240
    },
    {
      "epoch": 10.0,
      "learning_rate": 4.347826086956522e-05,
      "loss": 1.2596,
      "step": 1250
    },
    {
      "epoch": 10.08,
      "learning_rate": 4.339130434782609e-05,
      "loss": 1.0032,
      "step": 1260
    },
    {
      "epoch": 10.16,
      "learning_rate": 4.330434782608696e-05,
      "loss": 1.0681,
      "step": 1270
    },
    {
      "epoch": 10.24,
      "learning_rate": 4.321739130434783e-05,
      "loss": 1.0991,
      "step": 1280
    },
    {
      "epoch": 10.32,
      "learning_rate": 4.3130434782608695e-05,
      "loss": 0.9904,
      "step": 1290
    },
    {
      "epoch": 10.4,
      "learning_rate": 4.304347826086957e-05,
      "loss": 1.037,
      "step": 1300
    },
    {
      "epoch": 10.48,
      "learning_rate": 4.2956521739130435e-05,
      "loss": 1.1417,
      "step": 1310
    },
    {
      "epoch": 10.56,
      "learning_rate": 4.286956521739131e-05,
      "loss": 1.0919,
      "step": 1320
    },
    {
      "epoch": 10.64,
      "learning_rate": 4.2782608695652176e-05,
      "loss": 1.0887,
      "step": 1330
    },
    {
      "epoch": 10.72,
      "learning_rate": 4.269565217391305e-05,
      "loss": 1.1425,
      "step": 1340
    },
    {
      "epoch": 10.8,
      "learning_rate": 4.2608695652173916e-05,
      "loss": 1.2263,
      "step": 1350
    },
    {
      "epoch": 10.88,
      "learning_rate": 4.252173913043478e-05,
      "loss": 1.2244,
      "step": 1360
    },
    {
      "epoch": 10.96,
      "learning_rate": 4.2434782608695657e-05,
      "loss": 1.0981,
      "step": 1370
    },
    {
      "epoch": 11.04,
      "learning_rate": 4.2347826086956523e-05,
      "loss": 1.1665,
      "step": 1380
    },
    {
      "epoch": 11.12,
      "learning_rate": 4.226086956521739e-05,
      "loss": 0.9729,
      "step": 1390
    },
    {
      "epoch": 11.2,
      "learning_rate": 4.2173913043478264e-05,
      "loss": 1.0235,
      "step": 1400
    },
    {
      "epoch": 11.28,
      "learning_rate": 4.208695652173914e-05,
      "loss": 1.0718,
      "step": 1410
    },
    {
      "epoch": 11.36,
      "learning_rate": 4.2e-05,
      "loss": 1.1057,
      "step": 1420
    },
    {
      "epoch": 11.44,
      "learning_rate": 4.191304347826087e-05,
      "loss": 1.0105,
      "step": 1430
    },
    {
      "epoch": 11.52,
      "learning_rate": 4.1826086956521745e-05,
      "loss": 1.0284,
      "step": 1440
    },
    {
      "epoch": 11.6,
      "learning_rate": 4.1739130434782605e-05,
      "loss": 0.9386,
      "step": 1450
    },
    {
      "epoch": 11.68,
      "learning_rate": 4.165217391304348e-05,
      "loss": 1.0335,
      "step": 1460
    },
    {
      "epoch": 11.76,
      "learning_rate": 4.156521739130435e-05,
      "loss": 1.125,
      "step": 1470
    },
    {
      "epoch": 11.84,
      "learning_rate": 4.147826086956522e-05,
      "loss": 1.1251,
      "step": 1480
    },
    {
      "epoch": 11.92,
      "learning_rate": 4.1391304347826086e-05,
      "loss": 1.1962,
      "step": 1490
    },
    {
      "epoch": 12.0,
      "learning_rate": 4.130434782608696e-05,
      "loss": 1.1286,
      "step": 1500
    },
    {
      "epoch": 12.08,
      "learning_rate": 4.1217391304347827e-05,
      "loss": 0.9152,
      "step": 1510
    },
    {
      "epoch": 12.16,
      "learning_rate": 4.1130434782608693e-05,
      "loss": 0.9163,
      "step": 1520
    },
    {
      "epoch": 12.24,
      "learning_rate": 4.104347826086957e-05,
      "loss": 0.959,
      "step": 1530
    },
    {
      "epoch": 12.32,
      "learning_rate": 4.095652173913044e-05,
      "loss": 1.006,
      "step": 1540
    },
    {
      "epoch": 12.4,
      "learning_rate": 4.086956521739131e-05,
      "loss": 1.0188,
      "step": 1550
    },
    {
      "epoch": 12.48,
      "learning_rate": 4.0782608695652174e-05,
      "loss": 0.9931,
      "step": 1560
    },
    {
      "epoch": 12.56,
      "learning_rate": 4.069565217391305e-05,
      "loss": 1.0445,
      "step": 1570
    },
    {
      "epoch": 12.64,
      "learning_rate": 4.0608695652173915e-05,
      "loss": 1.0901,
      "step": 1580
    },
    {
      "epoch": 12.72,
      "learning_rate": 4.052173913043478e-05,
      "loss": 0.9693,
      "step": 1590
    },
    {
      "epoch": 12.8,
      "learning_rate": 4.0434782608695655e-05,
      "loss": 1.1191,
      "step": 1600
    },
    {
      "epoch": 12.88,
      "learning_rate": 4.034782608695652e-05,
      "loss": 1.1188,
      "step": 1610
    },
    {
      "epoch": 12.96,
      "learning_rate": 4.026086956521739e-05,
      "loss": 1.1422,
      "step": 1620
    },
    {
      "epoch": 13.04,
      "learning_rate": 4.017391304347826e-05,
      "loss": 1.0245,
      "step": 1630
    },
    {
      "epoch": 13.12,
      "learning_rate": 4.008695652173913e-05,
      "loss": 0.9048,
      "step": 1640
    },
    {
      "epoch": 13.2,
      "learning_rate": 4e-05,
      "loss": 0.925,
      "step": 1650
    },
    {
      "epoch": 13.28,
      "learning_rate": 3.991304347826087e-05,
      "loss": 0.8803,
      "step": 1660
    },
    {
      "epoch": 13.36,
      "learning_rate": 3.9826086956521744e-05,
      "loss": 0.9284,
      "step": 1670
    },
    {
      "epoch": 13.44,
      "learning_rate": 3.973913043478261e-05,
      "loss": 0.9985,
      "step": 1680
    },
    {
      "epoch": 13.52,
      "learning_rate": 3.965217391304348e-05,
      "loss": 1.0461,
      "step": 1690
    },
    {
      "epoch": 13.6,
      "learning_rate": 3.956521739130435e-05,
      "loss": 0.9542,
      "step": 1700
    },
    {
      "epoch": 13.68,
      "learning_rate": 3.947826086956522e-05,
      "loss": 0.9822,
      "step": 1710
    },
    {
      "epoch": 13.76,
      "learning_rate": 3.939130434782609e-05,
      "loss": 1.0781,
      "step": 1720
    },
    {
      "epoch": 13.84,
      "learning_rate": 3.930434782608696e-05,
      "loss": 1.0549,
      "step": 1730
    },
    {
      "epoch": 13.92,
      "learning_rate": 3.9217391304347825e-05,
      "loss": 1.0676,
      "step": 1740
    },
    {
      "epoch": 14.0,
      "learning_rate": 3.91304347826087e-05,
      "loss": 1.1258,
      "step": 1750
    },
    {
      "epoch": 14.08,
      "learning_rate": 3.9043478260869566e-05,
      "loss": 0.8544,
      "step": 1760
    },
    {
      "epoch": 14.16,
      "learning_rate": 3.895652173913043e-05,
      "loss": 0.9229,
      "step": 1770
    },
    {
      "epoch": 14.24,
      "learning_rate": 3.8869565217391306e-05,
      "loss": 0.9222,
      "step": 1780
    },
    {
      "epoch": 14.32,
      "learning_rate": 3.878260869565218e-05,
      "loss": 0.9293,
      "step": 1790
    },
    {
      "epoch": 14.4,
      "learning_rate": 3.869565217391305e-05,
      "loss": 0.9189,
      "step": 1800
    },
    {
      "epoch": 14.48,
      "learning_rate": 3.8608695652173914e-05,
      "loss": 0.9336,
      "step": 1810
    },
    {
      "epoch": 14.56,
      "learning_rate": 3.852173913043479e-05,
      "loss": 0.9787,
      "step": 1820
    },
    {
      "epoch": 14.64,
      "learning_rate": 3.8434782608695654e-05,
      "loss": 1.0282,
      "step": 1830
    },
    {
      "epoch": 14.72,
      "learning_rate": 3.834782608695652e-05,
      "loss": 0.9987,
      "step": 1840
    },
    {
      "epoch": 14.8,
      "learning_rate": 3.8260869565217395e-05,
      "loss": 1.0231,
      "step": 1850
    },
    {
      "epoch": 14.88,
      "learning_rate": 3.817391304347827e-05,
      "loss": 1.0009,
      "step": 1860
    },
    {
      "epoch": 14.96,
      "learning_rate": 3.808695652173913e-05,
      "loss": 1.0475,
      "step": 1870
    },
    {
      "epoch": 15.04,
      "learning_rate": 3.8e-05,
      "loss": 0.9605,
      "step": 1880
    },
    {
      "epoch": 15.12,
      "learning_rate": 3.7913043478260876e-05,
      "loss": 0.8575,
      "step": 1890
    },
    {
      "epoch": 15.2,
      "learning_rate": 3.7826086956521736e-05,
      "loss": 0.8875,
      "step": 1900
    },
    {
      "epoch": 15.28,
      "learning_rate": 3.773913043478261e-05,
      "loss": 0.9066,
      "step": 1910
    },
    {
      "epoch": 15.36,
      "learning_rate": 3.765217391304348e-05,
      "loss": 0.9074,
      "step": 1920
    },
    {
      "epoch": 15.44,
      "learning_rate": 3.756521739130435e-05,
      "loss": 0.9326,
      "step": 1930
    },
    {
      "epoch": 15.52,
      "learning_rate": 3.747826086956522e-05,
      "loss": 0.8979,
      "step": 1940
    },
    {
      "epoch": 15.6,
      "learning_rate": 3.739130434782609e-05,
      "loss": 0.9302,
      "step": 1950
    },
    {
      "epoch": 15.68,
      "learning_rate": 3.730434782608696e-05,
      "loss": 0.9764,
      "step": 1960
    },
    {
      "epoch": 15.76,
      "learning_rate": 3.7217391304347824e-05,
      "loss": 0.9775,
      "step": 1970
    },
    {
      "epoch": 15.84,
      "learning_rate": 3.71304347826087e-05,
      "loss": 0.9807,
      "step": 1980
    },
    {
      "epoch": 15.92,
      "learning_rate": 3.704347826086957e-05,
      "loss": 1.0364,
      "step": 1990
    },
    {
      "epoch": 16.0,
      "learning_rate": 3.695652173913043e-05,
      "loss": 1.0756,
      "step": 2000
    },
    {
      "epoch": 16.08,
      "learning_rate": 3.6869565217391305e-05,
      "loss": 0.8401,
      "step": 2010
    },
    {
      "epoch": 16.16,
      "learning_rate": 3.678260869565218e-05,
      "loss": 0.8194,
      "step": 2020
    },
    {
      "epoch": 16.24,
      "learning_rate": 3.6695652173913046e-05,
      "loss": 0.9139,
      "step": 2030
    },
    {
      "epoch": 16.32,
      "learning_rate": 3.660869565217391e-05,
      "loss": 0.8998,
      "step": 2040
    },
    {
      "epoch": 16.4,
      "learning_rate": 3.6521739130434786e-05,
      "loss": 0.9026,
      "step": 2050
    },
    {
      "epoch": 16.48,
      "learning_rate": 3.643478260869565e-05,
      "loss": 0.9809,
      "step": 2060
    },
    {
      "epoch": 16.56,
      "learning_rate": 3.634782608695652e-05,
      "loss": 0.9175,
      "step": 2070
    },
    {
      "epoch": 16.64,
      "learning_rate": 3.6260869565217394e-05,
      "loss": 0.953,
      "step": 2080
    },
    {
      "epoch": 16.72,
      "learning_rate": 3.617391304347826e-05,
      "loss": 0.9542,
      "step": 2090
    },
    {
      "epoch": 16.8,
      "learning_rate": 3.6086956521739134e-05,
      "loss": 1.0157,
      "step": 2100
    },
    {
      "epoch": 16.88,
      "learning_rate": 3.6e-05,
      "loss": 0.9836,
      "step": 2110
    },
    {
      "epoch": 16.96,
      "learning_rate": 3.5913043478260875e-05,
      "loss": 1.043,
      "step": 2120
    },
    {
      "epoch": 17.04,
      "learning_rate": 3.582608695652174e-05,
      "loss": 0.8193,
      "step": 2130
    },
    {
      "epoch": 17.12,
      "learning_rate": 3.573913043478261e-05,
      "loss": 0.8391,
      "step": 2140
    },
    {
      "epoch": 17.2,
      "learning_rate": 3.565217391304348e-05,
      "loss": 0.8211,
      "step": 2150
    },
    {
      "epoch": 17.28,
      "learning_rate": 3.556521739130435e-05,
      "loss": 0.8447,
      "step": 2160
    },
    {
      "epoch": 17.36,
      "learning_rate": 3.5478260869565216e-05,
      "loss": 0.8472,
      "step": 2170
    },
    {
      "epoch": 17.44,
      "learning_rate": 3.539130434782609e-05,
      "loss": 0.8897,
      "step": 2180
    },
    {
      "epoch": 17.52,
      "learning_rate": 3.5304347826086956e-05,
      "loss": 0.8916,
      "step": 2190
    },
    {
      "epoch": 17.6,
      "learning_rate": 3.521739130434783e-05,
      "loss": 0.9339,
      "step": 2200
    },
    {
      "epoch": 17.68,
      "learning_rate": 3.51304347826087e-05,
      "loss": 0.9641,
      "step": 2210
    },
    {
      "epoch": 17.76,
      "learning_rate": 3.5043478260869564e-05,
      "loss": 0.9489,
      "step": 2220
    },
    {
      "epoch": 17.84,
      "learning_rate": 3.495652173913044e-05,
      "loss": 0.9923,
      "step": 2230
    },
    {
      "epoch": 17.92,
      "learning_rate": 3.4869565217391304e-05,
      "loss": 0.954,
      "step": 2240
    },
    {
      "epoch": 18.0,
      "learning_rate": 3.478260869565218e-05,
      "loss": 0.9964,
      "step": 2250
    },
    {
      "epoch": 18.08,
      "learning_rate": 3.4695652173913045e-05,
      "loss": 0.8709,
      "step": 2260
    },
    {
      "epoch": 18.16,
      "learning_rate": 3.460869565217392e-05,
      "loss": 0.8586,
      "step": 2270
    },
    {
      "epoch": 18.24,
      "learning_rate": 3.4521739130434785e-05,
      "loss": 0.799,
      "step": 2280
    },
    {
      "epoch": 18.32,
      "learning_rate": 3.443478260869565e-05,
      "loss": 0.7715,
      "step": 2290
    },
    {
      "epoch": 18.4,
      "learning_rate": 3.4347826086956526e-05,
      "loss": 0.8854,
      "step": 2300
    },
    {
      "epoch": 18.48,
      "learning_rate": 3.426086956521739e-05,
      "loss": 0.9446,
      "step": 2310
    },
    {
      "epoch": 18.56,
      "learning_rate": 3.417391304347826e-05,
      "loss": 0.892,
      "step": 2320
    },
    {
      "epoch": 18.64,
      "learning_rate": 3.408695652173913e-05,
      "loss": 0.9439,
      "step": 2330
    },
    {
      "epoch": 18.72,
      "learning_rate": 3.4000000000000007e-05,
      "loss": 0.8857,
      "step": 2340
    },
    {
      "epoch": 18.8,
      "learning_rate": 3.3913043478260867e-05,
      "loss": 0.9497,
      "step": 2350
    },
    {
      "epoch": 18.88,
      "learning_rate": 3.382608695652174e-05,
      "loss": 0.9948,
      "step": 2360
    },
    {
      "epoch": 18.96,
      "learning_rate": 3.3739130434782614e-05,
      "loss": 0.9594,
      "step": 2370
    },
    {
      "epoch": 19.04,
      "learning_rate": 3.365217391304348e-05,
      "loss": 0.9142,
      "step": 2380
    },
    {
      "epoch": 19.12,
      "learning_rate": 3.356521739130435e-05,
      "loss": 0.8201,
      "step": 2390
    },
    {
      "epoch": 19.2,
      "learning_rate": 3.347826086956522e-05,
      "loss": 0.8425,
      "step": 2400
    },
    {
      "epoch": 19.28,
      "learning_rate": 3.339130434782609e-05,
      "loss": 0.7843,
      "step": 2410
    },
    {
      "epoch": 19.36,
      "learning_rate": 3.3304347826086955e-05,
      "loss": 0.9149,
      "step": 2420
    },
    {
      "epoch": 19.44,
      "learning_rate": 3.321739130434783e-05,
      "loss": 0.7852,
      "step": 2430
    },
    {
      "epoch": 19.52,
      "learning_rate": 3.31304347826087e-05,
      "loss": 0.8849,
      "step": 2440
    },
    {
      "epoch": 19.6,
      "learning_rate": 3.304347826086956e-05,
      "loss": 0.9418,
      "step": 2450
    },
    {
      "epoch": 19.68,
      "learning_rate": 3.2956521739130436e-05,
      "loss": 0.938,
      "step": 2460
    },
    {
      "epoch": 19.76,
      "learning_rate": 3.286956521739131e-05,
      "loss": 0.9208,
      "step": 2470
    },
    {
      "epoch": 19.84,
      "learning_rate": 3.278260869565217e-05,
      "loss": 0.9258,
      "step": 2480
    },
    {
      "epoch": 19.92,
      "learning_rate": 3.269565217391304e-05,
      "loss": 0.8696,
      "step": 2490
    },
    {
      "epoch": 20.0,
      "learning_rate": 3.260869565217392e-05,
      "loss": 0.9085,
      "step": 2500
    },
    {
      "epoch": 20.08,
      "learning_rate": 3.2521739130434784e-05,
      "loss": 0.8414,
      "step": 2510
    },
    {
      "epoch": 20.16,
      "learning_rate": 3.243478260869565e-05,
      "loss": 0.8509,
      "step": 2520
    },
    {
      "epoch": 20.24,
      "learning_rate": 3.2347826086956524e-05,
      "loss": 0.7594,
      "step": 2530
    },
    {
      "epoch": 20.32,
      "learning_rate": 3.226086956521739e-05,
      "loss": 0.8413,
      "step": 2540
    },
    {
      "epoch": 20.4,
      "learning_rate": 3.217391304347826e-05,
      "loss": 0.8918,
      "step": 2550
    },
    {
      "epoch": 20.48,
      "learning_rate": 3.208695652173913e-05,
      "loss": 0.8074,
      "step": 2560
    },
    {
      "epoch": 20.56,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 0.8728,
      "step": 2570
    },
    {
      "epoch": 20.64,
      "learning_rate": 3.191304347826087e-05,
      "loss": 0.8559,
      "step": 2580
    },
    {
      "epoch": 20.72,
      "learning_rate": 3.182608695652174e-05,
      "loss": 0.8778,
      "step": 2590
    },
    {
      "epoch": 20.8,
      "learning_rate": 3.173913043478261e-05,
      "loss": 0.9155,
      "step": 2600
    },
    {
      "epoch": 20.88,
      "learning_rate": 3.165217391304348e-05,
      "loss": 0.9692,
      "step": 2610
    },
    {
      "epoch": 20.96,
      "learning_rate": 3.1565217391304346e-05,
      "loss": 0.9685,
      "step": 2620
    },
    {
      "epoch": 21.04,
      "learning_rate": 3.147826086956522e-05,
      "loss": 0.9113,
      "step": 2630
    },
    {
      "epoch": 21.12,
      "learning_rate": 3.139130434782609e-05,
      "loss": 0.7723,
      "step": 2640
    },
    {
      "epoch": 21.2,
      "learning_rate": 3.130434782608696e-05,
      "loss": 0.8294,
      "step": 2650
    },
    {
      "epoch": 21.28,
      "learning_rate": 3.121739130434783e-05,
      "loss": 0.7949,
      "step": 2660
    },
    {
      "epoch": 21.36,
      "learning_rate": 3.1130434782608694e-05,
      "loss": 0.8509,
      "step": 2670
    },
    {
      "epoch": 21.44,
      "learning_rate": 3.104347826086957e-05,
      "loss": 0.8727,
      "step": 2680
    },
    {
      "epoch": 21.52,
      "learning_rate": 3.0956521739130435e-05,
      "loss": 0.8094,
      "step": 2690
    },
    {
      "epoch": 21.6,
      "learning_rate": 3.086956521739131e-05,
      "loss": 0.8458,
      "step": 2700
    },
    {
      "epoch": 21.68,
      "learning_rate": 3.0782608695652175e-05,
      "loss": 0.8416,
      "step": 2710
    },
    {
      "epoch": 21.76,
      "learning_rate": 3.069565217391305e-05,
      "loss": 0.8336,
      "step": 2720
    },
    {
      "epoch": 21.84,
      "learning_rate": 3.0608695652173916e-05,
      "loss": 0.9844,
      "step": 2730
    },
    {
      "epoch": 21.92,
      "learning_rate": 3.052173913043478e-05,
      "loss": 0.8814,
      "step": 2740
    },
    {
      "epoch": 22.0,
      "learning_rate": 3.0434782608695656e-05,
      "loss": 0.917,
      "step": 2750
    },
    {
      "epoch": 22.08,
      "learning_rate": 3.034782608695652e-05,
      "loss": 0.8319,
      "step": 2760
    },
    {
      "epoch": 22.16,
      "learning_rate": 3.0260869565217393e-05,
      "loss": 0.7807,
      "step": 2770
    },
    {
      "epoch": 22.24,
      "learning_rate": 3.0173913043478264e-05,
      "loss": 0.8231,
      "step": 2780
    },
    {
      "epoch": 22.32,
      "learning_rate": 3.008695652173913e-05,
      "loss": 0.7917,
      "step": 2790
    },
    {
      "epoch": 22.4,
      "learning_rate": 3e-05,
      "loss": 0.8658,
      "step": 2800
    },
    {
      "epoch": 22.48,
      "learning_rate": 2.991304347826087e-05,
      "loss": 0.8434,
      "step": 2810
    },
    {
      "epoch": 22.56,
      "learning_rate": 2.982608695652174e-05,
      "loss": 0.7944,
      "step": 2820
    },
    {
      "epoch": 22.64,
      "learning_rate": 2.9739130434782608e-05,
      "loss": 0.8425,
      "step": 2830
    },
    {
      "epoch": 22.72,
      "learning_rate": 2.965217391304348e-05,
      "loss": 0.9204,
      "step": 2840
    },
    {
      "epoch": 22.8,
      "learning_rate": 2.9565217391304352e-05,
      "loss": 0.8919,
      "step": 2850
    },
    {
      "epoch": 22.88,
      "learning_rate": 2.9478260869565215e-05,
      "loss": 0.8238,
      "step": 2860
    },
    {
      "epoch": 22.96,
      "learning_rate": 2.939130434782609e-05,
      "loss": 0.8917,
      "step": 2870
    },
    {
      "epoch": 23.04,
      "learning_rate": 2.930434782608696e-05,
      "loss": 0.886,
      "step": 2880
    },
    {
      "epoch": 23.12,
      "learning_rate": 2.921739130434783e-05,
      "loss": 0.8199,
      "step": 2890
    },
    {
      "epoch": 23.2,
      "learning_rate": 2.9130434782608696e-05,
      "loss": 0.7968,
      "step": 2900
    },
    {
      "epoch": 23.28,
      "learning_rate": 2.9043478260869567e-05,
      "loss": 0.8332,
      "step": 2910
    },
    {
      "epoch": 23.36,
      "learning_rate": 2.8956521739130437e-05,
      "loss": 0.8201,
      "step": 2920
    },
    {
      "epoch": 23.44,
      "learning_rate": 2.8869565217391304e-05,
      "loss": 0.8029,
      "step": 2930
    },
    {
      "epoch": 23.52,
      "learning_rate": 2.8782608695652174e-05,
      "loss": 0.7901,
      "step": 2940
    },
    {
      "epoch": 23.6,
      "learning_rate": 2.8695652173913044e-05,
      "loss": 0.8657,
      "step": 2950
    },
    {
      "epoch": 23.68,
      "learning_rate": 2.8608695652173918e-05,
      "loss": 0.8878,
      "step": 2960
    },
    {
      "epoch": 23.76,
      "learning_rate": 2.852173913043478e-05,
      "loss": 0.8956,
      "step": 2970
    },
    {
      "epoch": 23.84,
      "learning_rate": 2.8434782608695655e-05,
      "loss": 0.8472,
      "step": 2980
    },
    {
      "epoch": 23.92,
      "learning_rate": 2.8347826086956525e-05,
      "loss": 0.8308,
      "step": 2990
    },
    {
      "epoch": 24.0,
      "learning_rate": 2.826086956521739e-05,
      "loss": 0.8754,
      "step": 3000
    },
    {
      "epoch": 24.08,
      "learning_rate": 2.8173913043478262e-05,
      "loss": 0.7869,
      "step": 3010
    },
    {
      "epoch": 24.16,
      "learning_rate": 2.8086956521739133e-05,
      "loss": 0.7275,
      "step": 3020
    },
    {
      "epoch": 24.24,
      "learning_rate": 2.8000000000000003e-05,
      "loss": 0.7898,
      "step": 3030
    },
    {
      "epoch": 24.32,
      "learning_rate": 2.791304347826087e-05,
      "loss": 0.781,
      "step": 3040
    },
    {
      "epoch": 24.4,
      "learning_rate": 2.782608695652174e-05,
      "loss": 0.8289,
      "step": 3050
    },
    {
      "epoch": 24.48,
      "learning_rate": 2.773913043478261e-05,
      "loss": 0.8367,
      "step": 3060
    },
    {
      "epoch": 24.56,
      "learning_rate": 2.7652173913043477e-05,
      "loss": 0.8201,
      "step": 3070
    },
    {
      "epoch": 24.64,
      "learning_rate": 2.7565217391304347e-05,
      "loss": 0.8339,
      "step": 3080
    },
    {
      "epoch": 24.72,
      "learning_rate": 2.747826086956522e-05,
      "loss": 0.8857,
      "step": 3090
    },
    {
      "epoch": 24.8,
      "learning_rate": 2.7391304347826085e-05,
      "loss": 0.8688,
      "step": 3100
    },
    {
      "epoch": 24.88,
      "learning_rate": 2.7304347826086958e-05,
      "loss": 0.8792,
      "step": 3110
    },
    {
      "epoch": 24.96,
      "learning_rate": 2.721739130434783e-05,
      "loss": 0.8835,
      "step": 3120
    },
    {
      "epoch": 25.04,
      "learning_rate": 2.71304347826087e-05,
      "loss": 0.8183,
      "step": 3130
    },
    {
      "epoch": 25.12,
      "learning_rate": 2.7043478260869566e-05,
      "loss": 0.772,
      "step": 3140
    },
    {
      "epoch": 25.2,
      "learning_rate": 2.6956521739130436e-05,
      "loss": 0.7913,
      "step": 3150
    },
    {
      "epoch": 25.28,
      "learning_rate": 2.6869565217391306e-05,
      "loss": 0.782,
      "step": 3160
    },
    {
      "epoch": 25.36,
      "learning_rate": 2.6782608695652173e-05,
      "loss": 0.7693,
      "step": 3170
    },
    {
      "epoch": 25.44,
      "learning_rate": 2.6695652173913043e-05,
      "loss": 0.814,
      "step": 3180
    },
    {
      "epoch": 25.52,
      "learning_rate": 2.6608695652173913e-05,
      "loss": 0.771,
      "step": 3190
    },
    {
      "epoch": 25.6,
      "learning_rate": 2.6521739130434787e-05,
      "loss": 0.8702,
      "step": 3200
    },
    {
      "epoch": 25.68,
      "learning_rate": 2.643478260869565e-05,
      "loss": 0.8653,
      "step": 3210
    },
    {
      "epoch": 25.76,
      "learning_rate": 2.6347826086956524e-05,
      "loss": 0.8006,
      "step": 3220
    },
    {
      "epoch": 25.84,
      "learning_rate": 2.6260869565217394e-05,
      "loss": 0.8638,
      "step": 3230
    },
    {
      "epoch": 25.92,
      "learning_rate": 2.617391304347826e-05,
      "loss": 0.8425,
      "step": 3240
    },
    {
      "epoch": 26.0,
      "learning_rate": 2.608695652173913e-05,
      "loss": 0.8959,
      "step": 3250
    },
    {
      "epoch": 26.08,
      "learning_rate": 2.6000000000000002e-05,
      "loss": 0.7395,
      "step": 3260
    },
    {
      "epoch": 26.16,
      "learning_rate": 2.5913043478260872e-05,
      "loss": 0.7737,
      "step": 3270
    },
    {
      "epoch": 26.24,
      "learning_rate": 2.582608695652174e-05,
      "loss": 0.7703,
      "step": 3280
    },
    {
      "epoch": 26.32,
      "learning_rate": 2.573913043478261e-05,
      "loss": 0.7901,
      "step": 3290
    },
    {
      "epoch": 26.4,
      "learning_rate": 2.5652173913043483e-05,
      "loss": 0.8004,
      "step": 3300
    },
    {
      "epoch": 26.48,
      "learning_rate": 2.5565217391304346e-05,
      "loss": 0.8087,
      "step": 3310
    },
    {
      "epoch": 26.56,
      "learning_rate": 2.5478260869565217e-05,
      "loss": 0.8025,
      "step": 3320
    },
    {
      "epoch": 26.64,
      "learning_rate": 2.539130434782609e-05,
      "loss": 0.8545,
      "step": 3330
    },
    {
      "epoch": 26.72,
      "learning_rate": 2.530434782608696e-05,
      "loss": 0.8215,
      "step": 3340
    },
    {
      "epoch": 26.8,
      "learning_rate": 2.5217391304347827e-05,
      "loss": 0.7817,
      "step": 3350
    },
    {
      "epoch": 26.88,
      "learning_rate": 2.5130434782608698e-05,
      "loss": 0.8443,
      "step": 3360
    },
    {
      "epoch": 26.96,
      "learning_rate": 2.5043478260869568e-05,
      "loss": 0.8381,
      "step": 3370
    },
    {
      "epoch": 27.04,
      "learning_rate": 2.4956521739130438e-05,
      "loss": 0.7931,
      "step": 3380
    },
    {
      "epoch": 27.12,
      "learning_rate": 2.4869565217391305e-05,
      "loss": 0.7931,
      "step": 3390
    },
    {
      "epoch": 27.2,
      "learning_rate": 2.4782608695652175e-05,
      "loss": 0.729,
      "step": 3400
    },
    {
      "epoch": 27.28,
      "learning_rate": 2.4695652173913045e-05,
      "loss": 0.8028,
      "step": 3410
    },
    {
      "epoch": 27.36,
      "learning_rate": 2.4608695652173912e-05,
      "loss": 0.7529,
      "step": 3420
    },
    {
      "epoch": 27.44,
      "learning_rate": 2.4521739130434786e-05,
      "loss": 0.825,
      "step": 3430
    },
    {
      "epoch": 27.52,
      "learning_rate": 2.4434782608695653e-05,
      "loss": 0.785,
      "step": 3440
    },
    {
      "epoch": 27.6,
      "learning_rate": 2.4347826086956523e-05,
      "loss": 0.8408,
      "step": 3450
    },
    {
      "epoch": 27.68,
      "learning_rate": 2.4260869565217393e-05,
      "loss": 0.8812,
      "step": 3460
    },
    {
      "epoch": 27.76,
      "learning_rate": 2.417391304347826e-05,
      "loss": 0.8036,
      "step": 3470
    },
    {
      "epoch": 27.84,
      "learning_rate": 2.408695652173913e-05,
      "loss": 0.8844,
      "step": 3480
    },
    {
      "epoch": 27.92,
      "learning_rate": 2.4e-05,
      "loss": 0.8191,
      "step": 3490
    },
    {
      "epoch": 28.0,
      "learning_rate": 2.391304347826087e-05,
      "loss": 0.8419,
      "step": 3500
    },
    {
      "epoch": 28.08,
      "learning_rate": 2.382608695652174e-05,
      "loss": 0.7546,
      "step": 3510
    },
    {
      "epoch": 28.16,
      "learning_rate": 2.373913043478261e-05,
      "loss": 0.74,
      "step": 3520
    },
    {
      "epoch": 28.24,
      "learning_rate": 2.3652173913043478e-05,
      "loss": 0.7936,
      "step": 3530
    },
    {
      "epoch": 28.32,
      "learning_rate": 2.356521739130435e-05,
      "loss": 0.7558,
      "step": 3540
    },
    {
      "epoch": 28.4,
      "learning_rate": 2.347826086956522e-05,
      "loss": 0.8113,
      "step": 3550
    },
    {
      "epoch": 28.48,
      "learning_rate": 2.339130434782609e-05,
      "loss": 0.793,
      "step": 3560
    },
    {
      "epoch": 28.56,
      "learning_rate": 2.330434782608696e-05,
      "loss": 0.7783,
      "step": 3570
    },
    {
      "epoch": 28.64,
      "learning_rate": 2.3217391304347826e-05,
      "loss": 0.825,
      "step": 3580
    },
    {
      "epoch": 28.72,
      "learning_rate": 2.31304347826087e-05,
      "loss": 0.7734,
      "step": 3590
    },
    {
      "epoch": 28.8,
      "learning_rate": 2.3043478260869567e-05,
      "loss": 0.7998,
      "step": 3600
    },
    {
      "epoch": 28.88,
      "learning_rate": 2.2956521739130433e-05,
      "loss": 0.8553,
      "step": 3610
    },
    {
      "epoch": 28.96,
      "learning_rate": 2.2869565217391307e-05,
      "loss": 0.8441,
      "step": 3620
    },
    {
      "epoch": 29.04,
      "learning_rate": 2.2782608695652174e-05,
      "loss": 0.7547,
      "step": 3630
    },
    {
      "epoch": 29.12,
      "learning_rate": 2.2695652173913044e-05,
      "loss": 0.7616,
      "step": 3640
    },
    {
      "epoch": 29.2,
      "learning_rate": 2.2608695652173914e-05,
      "loss": 0.7356,
      "step": 3650
    },
    {
      "epoch": 29.28,
      "learning_rate": 2.252173913043478e-05,
      "loss": 0.739,
      "step": 3660
    },
    {
      "epoch": 29.36,
      "learning_rate": 2.2434782608695655e-05,
      "loss": 0.7591,
      "step": 3670
    },
    {
      "epoch": 29.44,
      "learning_rate": 2.2347826086956522e-05,
      "loss": 0.7885,
      "step": 3680
    },
    {
      "epoch": 29.52,
      "learning_rate": 2.2260869565217392e-05,
      "loss": 0.8358,
      "step": 3690
    },
    {
      "epoch": 29.6,
      "learning_rate": 2.2173913043478262e-05,
      "loss": 0.8253,
      "step": 3700
    },
    {
      "epoch": 29.68,
      "learning_rate": 2.2086956521739133e-05,
      "loss": 0.8004,
      "step": 3710
    },
    {
      "epoch": 29.76,
      "learning_rate": 2.2000000000000003e-05,
      "loss": 0.7941,
      "step": 3720
    },
    {
      "epoch": 29.84,
      "learning_rate": 2.191304347826087e-05,
      "loss": 0.7904,
      "step": 3730
    },
    {
      "epoch": 29.92,
      "learning_rate": 2.182608695652174e-05,
      "loss": 0.8109,
      "step": 3740
    },
    {
      "epoch": 30.0,
      "learning_rate": 2.173913043478261e-05,
      "loss": 0.8528,
      "step": 3750
    },
    {
      "epoch": 30.08,
      "learning_rate": 2.165217391304348e-05,
      "loss": 0.7457,
      "step": 3760
    },
    {
      "epoch": 30.16,
      "learning_rate": 2.1565217391304347e-05,
      "loss": 0.755,
      "step": 3770
    },
    {
      "epoch": 30.24,
      "learning_rate": 2.1478260869565218e-05,
      "loss": 0.7449,
      "step": 3780
    },
    {
      "epoch": 30.32,
      "learning_rate": 2.1391304347826088e-05,
      "loss": 0.7583,
      "step": 3790
    },
    {
      "epoch": 30.4,
      "learning_rate": 2.1304347826086958e-05,
      "loss": 0.7352,
      "step": 3800
    },
    {
      "epoch": 30.48,
      "learning_rate": 2.1217391304347828e-05,
      "loss": 0.7401,
      "step": 3810
    },
    {
      "epoch": 30.56,
      "learning_rate": 2.1130434782608695e-05,
      "loss": 0.7649,
      "step": 3820
    },
    {
      "epoch": 30.64,
      "learning_rate": 2.104347826086957e-05,
      "loss": 0.8294,
      "step": 3830
    },
    {
      "epoch": 30.72,
      "learning_rate": 2.0956521739130436e-05,
      "loss": 0.7539,
      "step": 3840
    },
    {
      "epoch": 30.8,
      "learning_rate": 2.0869565217391303e-05,
      "loss": 0.8142,
      "step": 3850
    },
    {
      "epoch": 30.88,
      "learning_rate": 2.0782608695652176e-05,
      "loss": 0.875,
      "step": 3860
    },
    {
      "epoch": 30.96,
      "learning_rate": 2.0695652173913043e-05,
      "loss": 0.7937,
      "step": 3870
    },
    {
      "epoch": 31.04,
      "learning_rate": 2.0608695652173913e-05,
      "loss": 0.7895,
      "step": 3880
    },
    {
      "epoch": 31.12,
      "learning_rate": 2.0521739130434784e-05,
      "loss": 0.7183,
      "step": 3890
    },
    {
      "epoch": 31.2,
      "learning_rate": 2.0434782608695654e-05,
      "loss": 0.7453,
      "step": 3900
    },
    {
      "epoch": 31.28,
      "learning_rate": 2.0347826086956524e-05,
      "loss": 0.741,
      "step": 3910
    },
    {
      "epoch": 31.36,
      "learning_rate": 2.026086956521739e-05,
      "loss": 0.7427,
      "step": 3920
    },
    {
      "epoch": 31.44,
      "learning_rate": 2.017391304347826e-05,
      "loss": 0.7211,
      "step": 3930
    },
    {
      "epoch": 31.52,
      "learning_rate": 2.008695652173913e-05,
      "loss": 0.7671,
      "step": 3940
    },
    {
      "epoch": 31.6,
      "learning_rate": 2e-05,
      "loss": 0.7455,
      "step": 3950
    },
    {
      "epoch": 31.68,
      "learning_rate": 1.9913043478260872e-05,
      "loss": 0.7925,
      "step": 3960
    },
    {
      "epoch": 31.76,
      "learning_rate": 1.982608695652174e-05,
      "loss": 0.8201,
      "step": 3970
    },
    {
      "epoch": 31.84,
      "learning_rate": 1.973913043478261e-05,
      "loss": 0.8209,
      "step": 3980
    },
    {
      "epoch": 31.92,
      "learning_rate": 1.965217391304348e-05,
      "loss": 0.8644,
      "step": 3990
    },
    {
      "epoch": 32.0,
      "learning_rate": 1.956521739130435e-05,
      "loss": 0.7728,
      "step": 4000
    },
    {
      "epoch": 32.08,
      "learning_rate": 1.9478260869565216e-05,
      "loss": 0.7284,
      "step": 4010
    },
    {
      "epoch": 32.16,
      "learning_rate": 1.939130434782609e-05,
      "loss": 0.7171,
      "step": 4020
    },
    {
      "epoch": 32.24,
      "learning_rate": 1.9304347826086957e-05,
      "loss": 0.7435,
      "step": 4030
    },
    {
      "epoch": 32.32,
      "learning_rate": 1.9217391304347827e-05,
      "loss": 0.736,
      "step": 4040
    },
    {
      "epoch": 32.4,
      "learning_rate": 1.9130434782608697e-05,
      "loss": 0.7511,
      "step": 4050
    },
    {
      "epoch": 32.48,
      "learning_rate": 1.9043478260869564e-05,
      "loss": 0.8181,
      "step": 4060
    },
    {
      "epoch": 32.56,
      "learning_rate": 1.8956521739130438e-05,
      "loss": 0.7707,
      "step": 4070
    },
    {
      "epoch": 32.64,
      "learning_rate": 1.8869565217391305e-05,
      "loss": 0.7859,
      "step": 4080
    },
    {
      "epoch": 32.72,
      "learning_rate": 1.8782608695652175e-05,
      "loss": 0.7545,
      "step": 4090
    },
    {
      "epoch": 32.8,
      "learning_rate": 1.8695652173913045e-05,
      "loss": 0.773,
      "step": 4100
    },
    {
      "epoch": 32.88,
      "learning_rate": 1.8608695652173912e-05,
      "loss": 0.8116,
      "step": 4110
    },
    {
      "epoch": 32.96,
      "learning_rate": 1.8521739130434786e-05,
      "loss": 0.8306,
      "step": 4120
    },
    {
      "epoch": 33.04,
      "learning_rate": 1.8434782608695653e-05,
      "loss": 0.715,
      "step": 4130
    },
    {
      "epoch": 33.12,
      "learning_rate": 1.8347826086956523e-05,
      "loss": 0.7313,
      "step": 4140
    },
    {
      "epoch": 33.2,
      "learning_rate": 1.8260869565217393e-05,
      "loss": 0.7405,
      "step": 4150
    },
    {
      "epoch": 33.28,
      "learning_rate": 1.817391304347826e-05,
      "loss": 0.7378,
      "step": 4160
    },
    {
      "epoch": 33.36,
      "learning_rate": 1.808695652173913e-05,
      "loss": 0.7845,
      "step": 4170
    },
    {
      "epoch": 33.44,
      "learning_rate": 1.8e-05,
      "loss": 0.7204,
      "step": 4180
    },
    {
      "epoch": 33.52,
      "learning_rate": 1.791304347826087e-05,
      "loss": 0.7728,
      "step": 4190
    },
    {
      "epoch": 33.6,
      "learning_rate": 1.782608695652174e-05,
      "loss": 0.8212,
      "step": 4200
    },
    {
      "epoch": 33.68,
      "learning_rate": 1.7739130434782608e-05,
      "loss": 0.7131,
      "step": 4210
    },
    {
      "epoch": 33.76,
      "learning_rate": 1.7652173913043478e-05,
      "loss": 0.7555,
      "step": 4220
    },
    {
      "epoch": 33.84,
      "learning_rate": 1.756521739130435e-05,
      "loss": 0.8008,
      "step": 4230
    },
    {
      "epoch": 33.92,
      "learning_rate": 1.747826086956522e-05,
      "loss": 0.8415,
      "step": 4240
    },
    {
      "epoch": 34.0,
      "learning_rate": 1.739130434782609e-05,
      "loss": 0.7872,
      "step": 4250
    },
    {
      "epoch": 34.08,
      "learning_rate": 1.730434782608696e-05,
      "loss": 0.7312,
      "step": 4260
    },
    {
      "epoch": 34.16,
      "learning_rate": 1.7217391304347826e-05,
      "loss": 0.7106,
      "step": 4270
    },
    {
      "epoch": 34.24,
      "learning_rate": 1.7130434782608696e-05,
      "loss": 0.6981,
      "step": 4280
    },
    {
      "epoch": 34.32,
      "learning_rate": 1.7043478260869566e-05,
      "loss": 0.6903,
      "step": 4290
    },
    {
      "epoch": 34.4,
      "learning_rate": 1.6956521739130433e-05,
      "loss": 0.7726,
      "step": 4300
    },
    {
      "epoch": 34.48,
      "learning_rate": 1.6869565217391307e-05,
      "loss": 0.776,
      "step": 4310
    },
    {
      "epoch": 34.56,
      "learning_rate": 1.6782608695652174e-05,
      "loss": 0.7745,
      "step": 4320
    },
    {
      "epoch": 34.64,
      "learning_rate": 1.6695652173913044e-05,
      "loss": 0.7843,
      "step": 4330
    },
    {
      "epoch": 34.72,
      "learning_rate": 1.6608695652173914e-05,
      "loss": 0.7705,
      "step": 4340
    },
    {
      "epoch": 34.8,
      "learning_rate": 1.652173913043478e-05,
      "loss": 0.8103,
      "step": 4350
    },
    {
      "epoch": 34.88,
      "learning_rate": 1.6434782608695655e-05,
      "loss": 0.793,
      "step": 4360
    },
    {
      "epoch": 34.96,
      "learning_rate": 1.634782608695652e-05,
      "loss": 0.7965,
      "step": 4370
    },
    {
      "epoch": 35.04,
      "learning_rate": 1.6260869565217392e-05,
      "loss": 0.7574,
      "step": 4380
    },
    {
      "epoch": 35.12,
      "learning_rate": 1.6173913043478262e-05,
      "loss": 0.6871,
      "step": 4390
    },
    {
      "epoch": 35.2,
      "learning_rate": 1.608695652173913e-05,
      "loss": 0.7258,
      "step": 4400
    },
    {
      "epoch": 35.28,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 0.7213,
      "step": 4410
    },
    {
      "epoch": 35.36,
      "learning_rate": 1.591304347826087e-05,
      "loss": 0.704,
      "step": 4420
    },
    {
      "epoch": 35.44,
      "learning_rate": 1.582608695652174e-05,
      "loss": 0.7534,
      "step": 4430
    },
    {
      "epoch": 35.52,
      "learning_rate": 1.573913043478261e-05,
      "loss": 0.7666,
      "step": 4440
    },
    {
      "epoch": 35.6,
      "learning_rate": 1.565217391304348e-05,
      "loss": 0.7837,
      "step": 4450
    },
    {
      "epoch": 35.68,
      "learning_rate": 1.5565217391304347e-05,
      "loss": 0.7544,
      "step": 4460
    },
    {
      "epoch": 35.76,
      "learning_rate": 1.5478260869565217e-05,
      "loss": 0.7645,
      "step": 4470
    },
    {
      "epoch": 35.84,
      "learning_rate": 1.5391304347826088e-05,
      "loss": 0.7887,
      "step": 4480
    },
    {
      "epoch": 35.92,
      "learning_rate": 1.5304347826086958e-05,
      "loss": 0.7201,
      "step": 4490
    },
    {
      "epoch": 36.0,
      "learning_rate": 1.5217391304347828e-05,
      "loss": 0.8047,
      "step": 4500
    },
    {
      "epoch": 36.08,
      "learning_rate": 1.5130434782608697e-05,
      "loss": 0.682,
      "step": 4510
    },
    {
      "epoch": 36.16,
      "learning_rate": 1.5043478260869565e-05,
      "loss": 0.7343,
      "step": 4520
    },
    {
      "epoch": 36.24,
      "learning_rate": 1.4956521739130436e-05,
      "loss": 0.7002,
      "step": 4530
    },
    {
      "epoch": 36.32,
      "learning_rate": 1.4869565217391304e-05,
      "loss": 0.7418,
      "step": 4540
    },
    {
      "epoch": 36.4,
      "learning_rate": 1.4782608695652176e-05,
      "loss": 0.7528,
      "step": 4550
    },
    {
      "epoch": 36.48,
      "learning_rate": 1.4695652173913045e-05,
      "loss": 0.7683,
      "step": 4560
    },
    {
      "epoch": 36.56,
      "learning_rate": 1.4608695652173915e-05,
      "loss": 0.7687,
      "step": 4570
    },
    {
      "epoch": 36.64,
      "learning_rate": 1.4521739130434783e-05,
      "loss": 0.7404,
      "step": 4580
    },
    {
      "epoch": 36.72,
      "learning_rate": 1.4434782608695652e-05,
      "loss": 0.7402,
      "step": 4590
    },
    {
      "epoch": 36.8,
      "learning_rate": 1.4347826086956522e-05,
      "loss": 0.7542,
      "step": 4600
    },
    {
      "epoch": 36.88,
      "learning_rate": 1.426086956521739e-05,
      "loss": 0.8203,
      "step": 4610
    },
    {
      "epoch": 36.96,
      "learning_rate": 1.4173913043478263e-05,
      "loss": 0.7896,
      "step": 4620
    },
    {
      "epoch": 37.04,
      "learning_rate": 1.4086956521739131e-05,
      "loss": 0.7888,
      "step": 4630
    },
    {
      "epoch": 37.12,
      "learning_rate": 1.4000000000000001e-05,
      "loss": 0.691,
      "step": 4640
    },
    {
      "epoch": 37.2,
      "learning_rate": 1.391304347826087e-05,
      "loss": 0.7158,
      "step": 4650
    },
    {
      "epoch": 37.28,
      "learning_rate": 1.3826086956521739e-05,
      "loss": 0.6983,
      "step": 4660
    },
    {
      "epoch": 37.36,
      "learning_rate": 1.373913043478261e-05,
      "loss": 0.7424,
      "step": 4670
    },
    {
      "epoch": 37.44,
      "learning_rate": 1.3652173913043479e-05,
      "loss": 0.7273,
      "step": 4680
    },
    {
      "epoch": 37.52,
      "learning_rate": 1.356521739130435e-05,
      "loss": 0.7105,
      "step": 4690
    },
    {
      "epoch": 37.6,
      "learning_rate": 1.3478260869565218e-05,
      "loss": 0.765,
      "step": 4700
    },
    {
      "epoch": 37.68,
      "learning_rate": 1.3391304347826086e-05,
      "loss": 0.7455,
      "step": 4710
    },
    {
      "epoch": 37.76,
      "learning_rate": 1.3304347826086957e-05,
      "loss": 0.7378,
      "step": 4720
    },
    {
      "epoch": 37.84,
      "learning_rate": 1.3217391304347825e-05,
      "loss": 0.7797,
      "step": 4730
    },
    {
      "epoch": 37.92,
      "learning_rate": 1.3130434782608697e-05,
      "loss": 0.7881,
      "step": 4740
    },
    {
      "epoch": 38.0,
      "learning_rate": 1.3043478260869566e-05,
      "loss": 0.7675,
      "step": 4750
    },
    {
      "epoch": 38.08,
      "learning_rate": 1.2956521739130436e-05,
      "loss": 0.7251,
      "step": 4760
    },
    {
      "epoch": 38.16,
      "learning_rate": 1.2869565217391305e-05,
      "loss": 0.736,
      "step": 4770
    },
    {
      "epoch": 38.24,
      "learning_rate": 1.2782608695652173e-05,
      "loss": 0.6723,
      "step": 4780
    },
    {
      "epoch": 38.32,
      "learning_rate": 1.2695652173913045e-05,
      "loss": 0.736,
      "step": 4790
    },
    {
      "epoch": 38.4,
      "learning_rate": 1.2608695652173914e-05,
      "loss": 0.7543,
      "step": 4800
    },
    {
      "epoch": 38.48,
      "learning_rate": 1.2521739130434784e-05,
      "loss": 0.708,
      "step": 4810
    },
    {
      "epoch": 38.56,
      "learning_rate": 1.2434782608695652e-05,
      "loss": 0.7561,
      "step": 4820
    },
    {
      "epoch": 38.64,
      "learning_rate": 1.2347826086956523e-05,
      "loss": 0.7343,
      "step": 4830
    },
    {
      "epoch": 38.72,
      "learning_rate": 1.2260869565217393e-05,
      "loss": 0.748,
      "step": 4840
    },
    {
      "epoch": 38.8,
      "learning_rate": 1.2173913043478261e-05,
      "loss": 0.7882,
      "step": 4850
    },
    {
      "epoch": 38.88,
      "learning_rate": 1.208695652173913e-05,
      "loss": 0.7107,
      "step": 4860
    },
    {
      "epoch": 38.96,
      "learning_rate": 1.2e-05,
      "loss": 0.7572,
      "step": 4870
    },
    {
      "epoch": 39.04,
      "learning_rate": 1.191304347826087e-05,
      "loss": 0.7749,
      "step": 4880
    },
    {
      "epoch": 39.12,
      "learning_rate": 1.1826086956521739e-05,
      "loss": 0.7238,
      "step": 4890
    },
    {
      "epoch": 39.2,
      "learning_rate": 1.173913043478261e-05,
      "loss": 0.7204,
      "step": 4900
    },
    {
      "epoch": 39.28,
      "learning_rate": 1.165217391304348e-05,
      "loss": 0.7088,
      "step": 4910
    },
    {
      "epoch": 39.36,
      "learning_rate": 1.156521739130435e-05,
      "loss": 0.7424,
      "step": 4920
    },
    {
      "epoch": 39.44,
      "learning_rate": 1.1478260869565217e-05,
      "loss": 0.7483,
      "step": 4930
    },
    {
      "epoch": 39.52,
      "learning_rate": 1.1391304347826087e-05,
      "loss": 0.7048,
      "step": 4940
    },
    {
      "epoch": 39.6,
      "learning_rate": 1.1304347826086957e-05,
      "loss": 0.7583,
      "step": 4950
    },
    {
      "epoch": 39.68,
      "learning_rate": 1.1217391304347827e-05,
      "loss": 0.6905,
      "step": 4960
    },
    {
      "epoch": 39.76,
      "learning_rate": 1.1130434782608696e-05,
      "loss": 0.6934,
      "step": 4970
    },
    {
      "epoch": 39.84,
      "learning_rate": 1.1043478260869566e-05,
      "loss": 0.7766,
      "step": 4980
    },
    {
      "epoch": 39.92,
      "learning_rate": 1.0956521739130435e-05,
      "loss": 0.7716,
      "step": 4990
    },
    {
      "epoch": 40.0,
      "learning_rate": 1.0869565217391305e-05,
      "loss": 0.7482,
      "step": 5000
    },
    {
      "epoch": 40.08,
      "learning_rate": 1.0782608695652174e-05,
      "loss": 0.6505,
      "step": 5010
    },
    {
      "epoch": 40.16,
      "learning_rate": 1.0695652173913044e-05,
      "loss": 0.6988,
      "step": 5020
    },
    {
      "epoch": 40.24,
      "learning_rate": 1.0608695652173914e-05,
      "loss": 0.7083,
      "step": 5030
    },
    {
      "epoch": 40.32,
      "learning_rate": 1.0521739130434784e-05,
      "loss": 0.7353,
      "step": 5040
    },
    {
      "epoch": 40.4,
      "learning_rate": 1.0434782608695651e-05,
      "loss": 0.7568,
      "step": 5050
    },
    {
      "epoch": 40.48,
      "learning_rate": 1.0347826086956522e-05,
      "loss": 0.7481,
      "step": 5060
    },
    {
      "epoch": 40.56,
      "learning_rate": 1.0260869565217392e-05,
      "loss": 0.7494,
      "step": 5070
    },
    {
      "epoch": 40.64,
      "learning_rate": 1.0173913043478262e-05,
      "loss": 0.7494,
      "step": 5080
    },
    {
      "epoch": 40.72,
      "learning_rate": 1.008695652173913e-05,
      "loss": 0.7494,
      "step": 5090
    },
    {
      "epoch": 40.8,
      "learning_rate": 1e-05,
      "loss": 0.7479,
      "step": 5100
    },
    {
      "epoch": 40.88,
      "learning_rate": 9.91304347826087e-06,
      "loss": 0.7476,
      "step": 5110
    },
    {
      "epoch": 40.96,
      "learning_rate": 9.82608695652174e-06,
      "loss": 0.7544,
      "step": 5120
    },
    {
      "epoch": 41.04,
      "learning_rate": 9.739130434782608e-06,
      "loss": 0.6957,
      "step": 5130
    },
    {
      "epoch": 41.12,
      "learning_rate": 9.652173913043478e-06,
      "loss": 0.7071,
      "step": 5140
    },
    {
      "epoch": 41.2,
      "learning_rate": 9.565217391304349e-06,
      "loss": 0.7036,
      "step": 5150
    },
    {
      "epoch": 41.28,
      "learning_rate": 9.478260869565219e-06,
      "loss": 0.6897,
      "step": 5160
    },
    {
      "epoch": 41.36,
      "learning_rate": 9.391304347826087e-06,
      "loss": 0.7168,
      "step": 5170
    },
    {
      "epoch": 41.44,
      "learning_rate": 9.304347826086956e-06,
      "loss": 0.6823,
      "step": 5180
    },
    {
      "epoch": 41.52,
      "learning_rate": 9.217391304347826e-06,
      "loss": 0.7238,
      "step": 5190
    },
    {
      "epoch": 41.6,
      "learning_rate": 9.130434782608697e-06,
      "loss": 0.7366,
      "step": 5200
    },
    {
      "epoch": 41.68,
      "learning_rate": 9.043478260869565e-06,
      "loss": 0.7688,
      "step": 5210
    },
    {
      "epoch": 41.76,
      "learning_rate": 8.956521739130435e-06,
      "loss": 0.7606,
      "step": 5220
    },
    {
      "epoch": 41.84,
      "learning_rate": 8.869565217391304e-06,
      "loss": 0.7199,
      "step": 5230
    },
    {
      "epoch": 41.92,
      "learning_rate": 8.782608695652174e-06,
      "loss": 0.7336,
      "step": 5240
    },
    {
      "epoch": 42.0,
      "learning_rate": 8.695652173913044e-06,
      "loss": 0.7061,
      "step": 5250
    },
    {
      "epoch": 42.08,
      "learning_rate": 8.608695652173913e-06,
      "loss": 0.7108,
      "step": 5260
    },
    {
      "epoch": 42.16,
      "learning_rate": 8.521739130434783e-06,
      "loss": 0.6611,
      "step": 5270
    },
    {
      "epoch": 42.24,
      "learning_rate": 8.434782608695653e-06,
      "loss": 0.7001,
      "step": 5280
    },
    {
      "epoch": 42.32,
      "learning_rate": 8.347826086956522e-06,
      "loss": 0.7304,
      "step": 5290
    },
    {
      "epoch": 42.4,
      "learning_rate": 8.26086956521739e-06,
      "loss": 0.7143,
      "step": 5300
    },
    {
      "epoch": 42.48,
      "learning_rate": 8.17391304347826e-06,
      "loss": 0.6987,
      "step": 5310
    },
    {
      "epoch": 42.56,
      "learning_rate": 8.086956521739131e-06,
      "loss": 0.7078,
      "step": 5320
    },
    {
      "epoch": 42.64,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.7553,
      "step": 5330
    },
    {
      "epoch": 42.72,
      "learning_rate": 7.91304347826087e-06,
      "loss": 0.7014,
      "step": 5340
    },
    {
      "epoch": 42.8,
      "learning_rate": 7.82608695652174e-06,
      "loss": 0.7483,
      "step": 5350
    },
{ |
|
"epoch": 42.88, |
|
"learning_rate": 7.739130434782609e-06, |
|
"loss": 0.7629, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 42.96, |
|
"learning_rate": 7.652173913043479e-06, |
|
"loss": 0.7117, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 43.04, |
|
"learning_rate": 7.565217391304348e-06, |
|
"loss": 0.7447, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 43.12, |
|
"learning_rate": 7.478260869565218e-06, |
|
"loss": 0.6614, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 43.2, |
|
"learning_rate": 7.391304347826088e-06, |
|
"loss": 0.7417, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 43.28, |
|
"learning_rate": 7.304347826086957e-06, |
|
"loss": 0.7217, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 43.36, |
|
"learning_rate": 7.217391304347826e-06, |
|
"loss": 0.704, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 43.44, |
|
"learning_rate": 7.130434782608695e-06, |
|
"loss": 0.704, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 43.52, |
|
"learning_rate": 7.043478260869566e-06, |
|
"loss": 0.7896, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 43.6, |
|
"learning_rate": 6.956521739130435e-06, |
|
"loss": 0.7481, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 43.68, |
|
"learning_rate": 6.869565217391305e-06, |
|
"loss": 0.7105, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 43.76, |
|
"learning_rate": 6.782608695652175e-06, |
|
"loss": 0.6717, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 43.84, |
|
"learning_rate": 6.695652173913043e-06, |
|
"loss": 0.6871, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 43.92, |
|
"learning_rate": 6.608695652173913e-06, |
|
"loss": 0.7229, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 44.0, |
|
"learning_rate": 6.521739130434783e-06, |
|
"loss": 0.7324, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 44.08, |
|
"learning_rate": 6.434782608695652e-06, |
|
"loss": 0.6597, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 44.16, |
|
"learning_rate": 6.3478260869565225e-06, |
|
"loss": 0.6771, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 44.24, |
|
"learning_rate": 6.260869565217392e-06, |
|
"loss": 0.7184, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 44.32, |
|
"learning_rate": 6.173913043478261e-06, |
|
"loss": 0.6816, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 44.4, |
|
"learning_rate": 6.086956521739131e-06, |
|
"loss": 0.7171, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 44.48, |
|
"learning_rate": 6e-06, |
|
"loss": 0.6723, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 44.56, |
|
"learning_rate": 5.9130434782608696e-06, |
|
"loss": 0.6991, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 44.64, |
|
"learning_rate": 5.82608695652174e-06, |
|
"loss": 0.7538, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 44.72, |
|
"learning_rate": 5.739130434782608e-06, |
|
"loss": 0.7405, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 44.8, |
|
"learning_rate": 5.652173913043479e-06, |
|
"loss": 0.6698, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 44.88, |
|
"learning_rate": 5.565217391304348e-06, |
|
"loss": 0.7276, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 44.96, |
|
"learning_rate": 5.478260869565217e-06, |
|
"loss": 0.7446, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 45.04, |
|
"learning_rate": 5.391304347826087e-06, |
|
"loss": 0.7269, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 45.12, |
|
"learning_rate": 5.304347826086957e-06, |
|
"loss": 0.6781, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 45.2, |
|
"learning_rate": 5.217391304347826e-06, |
|
"loss": 0.7352, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 45.28, |
|
"learning_rate": 5.130434782608696e-06, |
|
"loss": 0.7599, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 45.36, |
|
"learning_rate": 5.043478260869565e-06, |
|
"loss": 0.7081, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 45.44, |
|
"learning_rate": 4.956521739130435e-06, |
|
"loss": 0.7287, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 45.52, |
|
"learning_rate": 4.869565217391304e-06, |
|
"loss": 0.6988, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 45.6, |
|
"learning_rate": 4.782608695652174e-06, |
|
"loss": 0.716, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 45.68, |
|
"learning_rate": 4.695652173913044e-06, |
|
"loss": 0.6744, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 45.76, |
|
"learning_rate": 4.608695652173913e-06, |
|
"loss": 0.6928, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 45.84, |
|
"learning_rate": 4.5217391304347826e-06, |
|
"loss": 0.721, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 45.92, |
|
"learning_rate": 4.434782608695652e-06, |
|
"loss": 0.7276, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 46.0, |
|
"learning_rate": 4.347826086956522e-06, |
|
"loss": 0.7224, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 46.08, |
|
"learning_rate": 4.260869565217392e-06, |
|
"loss": 0.7101, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 46.16, |
|
"learning_rate": 4.173913043478261e-06, |
|
"loss": 0.6956, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 46.24, |
|
"learning_rate": 4.08695652173913e-06, |
|
"loss": 0.734, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 46.32, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 0.6655, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 46.4, |
|
"learning_rate": 3.91304347826087e-06, |
|
"loss": 0.6528, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 46.48, |
|
"learning_rate": 3.8260869565217395e-06, |
|
"loss": 0.6876, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 46.56, |
|
"learning_rate": 3.739130434782609e-06, |
|
"loss": 0.6792, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 46.64, |
|
"learning_rate": 3.6521739130434787e-06, |
|
"loss": 0.7622, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 46.72, |
|
"learning_rate": 3.5652173913043477e-06, |
|
"loss": 0.6852, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 46.8, |
|
"learning_rate": 3.4782608695652175e-06, |
|
"loss": 0.6863, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 46.88, |
|
"learning_rate": 3.3913043478260873e-06, |
|
"loss": 0.7497, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 46.96, |
|
"learning_rate": 3.3043478260869563e-06, |
|
"loss": 0.7246, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 47.04, |
|
"learning_rate": 3.217391304347826e-06, |
|
"loss": 0.7235, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 47.12, |
|
"learning_rate": 3.130434782608696e-06, |
|
"loss": 0.7068, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 47.2, |
|
"learning_rate": 3.0434782608695654e-06, |
|
"loss": 0.7105, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 47.28, |
|
"learning_rate": 2.9565217391304348e-06, |
|
"loss": 0.6939, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 47.36, |
|
"learning_rate": 2.869565217391304e-06, |
|
"loss": 0.6867, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 47.44, |
|
"learning_rate": 2.782608695652174e-06, |
|
"loss": 0.6587, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 47.52, |
|
"learning_rate": 2.6956521739130434e-06, |
|
"loss": 0.6914, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 47.6, |
|
"learning_rate": 2.608695652173913e-06, |
|
"loss": 0.6501, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 47.68, |
|
"learning_rate": 2.5217391304347826e-06, |
|
"loss": 0.7491, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 47.76, |
|
"learning_rate": 2.434782608695652e-06, |
|
"loss": 0.7428, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 47.84, |
|
"learning_rate": 2.347826086956522e-06, |
|
"loss": 0.716, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 47.92, |
|
"learning_rate": 2.2608695652173913e-06, |
|
"loss": 0.6706, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 48.0, |
|
"learning_rate": 2.173913043478261e-06, |
|
"loss": 0.6691, |
|
"step": 6000 |
|
} |
|
], |
|
"max_steps": 6250, |
|
"num_train_epochs": 50, |
|
"total_flos": 8948999081401344.0, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |