{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 7.0,
  "eval_steps": 500,
  "global_step": 231,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.030303030303030304,
      "grad_norm": 6.391723498659803,
      "learning_rate": 8.333333333333333e-07,
      "loss": 1.0079,
      "step": 1
    },
    {
      "epoch": 0.06060606060606061,
      "grad_norm": 6.445240390692674,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 1.0204,
      "step": 2
    },
    {
      "epoch": 0.09090909090909091,
      "grad_norm": 6.382757287715354,
      "learning_rate": 2.5e-06,
      "loss": 1.0341,
      "step": 3
    },
    {
      "epoch": 0.12121212121212122,
      "grad_norm": 5.91272615348115,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 1.0061,
      "step": 4
    },
    {
      "epoch": 0.15151515151515152,
      "grad_norm": 4.722008058167302,
      "learning_rate": 4.166666666666667e-06,
      "loss": 0.9998,
      "step": 5
    },
    {
      "epoch": 0.18181818181818182,
      "grad_norm": 2.823573088690271,
      "learning_rate": 5e-06,
      "loss": 0.9534,
      "step": 6
    },
    {
      "epoch": 0.21212121212121213,
      "grad_norm": 2.5666390159658707,
      "learning_rate": 5.833333333333334e-06,
      "loss": 0.94,
      "step": 7
    },
    {
      "epoch": 0.24242424242424243,
      "grad_norm": 3.848402404280175,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.908,
      "step": 8
    },
    {
      "epoch": 0.2727272727272727,
      "grad_norm": 4.202645262587812,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.9232,
      "step": 9
    },
    {
      "epoch": 0.30303030303030304,
      "grad_norm": 3.9915170290882944,
      "learning_rate": 8.333333333333334e-06,
      "loss": 0.9167,
      "step": 10
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 3.23573372272586,
      "learning_rate": 9.166666666666666e-06,
      "loss": 0.852,
      "step": 11
    },
    {
      "epoch": 0.36363636363636365,
      "grad_norm": 2.9616255593701313,
      "learning_rate": 1e-05,
      "loss": 0.8476,
      "step": 12
    },
    {
      "epoch": 0.3939393939393939,
      "grad_norm": 2.0802017021336674,
      "learning_rate": 1.0833333333333334e-05,
      "loss": 0.8395,
      "step": 13
    },
    {
      "epoch": 0.42424242424242425,
      "grad_norm": 1.423032978782169,
      "learning_rate": 1.1666666666666668e-05,
      "loss": 0.8106,
      "step": 14
    },
    {
      "epoch": 0.45454545454545453,
      "grad_norm": 1.5233590243168056,
      "learning_rate": 1.25e-05,
      "loss": 0.7919,
      "step": 15
    },
    {
      "epoch": 0.48484848484848486,
      "grad_norm": 1.4027323424688252,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.7876,
      "step": 16
    },
    {
      "epoch": 0.5151515151515151,
      "grad_norm": 1.1436169964554868,
      "learning_rate": 1.416666666666667e-05,
      "loss": 0.7588,
      "step": 17
    },
    {
      "epoch": 0.5454545454545454,
      "grad_norm": 1.2584398803137398,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.7569,
      "step": 18
    },
    {
      "epoch": 0.5757575757575758,
      "grad_norm": 1.1708685062880144,
      "learning_rate": 1.5833333333333333e-05,
      "loss": 0.7448,
      "step": 19
    },
    {
      "epoch": 0.6060606060606061,
      "grad_norm": 0.9803925680829616,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.7378,
      "step": 20
    },
    {
      "epoch": 0.6363636363636364,
      "grad_norm": 0.9949928486317755,
      "learning_rate": 1.7500000000000002e-05,
      "loss": 0.7286,
      "step": 21
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 1.030026417900517,
      "learning_rate": 1.8333333333333333e-05,
      "loss": 0.718,
      "step": 22
    },
    {
      "epoch": 0.696969696969697,
      "grad_norm": 1.1471899394360263,
      "learning_rate": 1.916666666666667e-05,
      "loss": 0.7148,
      "step": 23
    },
    {
      "epoch": 0.7272727272727273,
      "grad_norm": 0.9013002354547361,
      "learning_rate": 2e-05,
      "loss": 0.7179,
      "step": 24
    },
    {
      "epoch": 0.7575757575757576,
      "grad_norm": 1.1125959175909579,
      "learning_rate": 1.999884834944106e-05,
      "loss": 0.7161,
      "step": 25
    },
    {
      "epoch": 0.7878787878787878,
      "grad_norm": 0.9665027218020549,
      "learning_rate": 1.9995393663024054e-05,
      "loss": 0.7173,
      "step": 26
    },
    {
      "epoch": 0.8181818181818182,
      "grad_norm": 0.808324047733547,
      "learning_rate": 1.9989636736467278e-05,
      "loss": 0.7003,
      "step": 27
    },
    {
      "epoch": 0.8484848484848485,
      "grad_norm": 0.8598266261667703,
      "learning_rate": 1.9981578895764272e-05,
      "loss": 0.7005,
      "step": 28
    },
    {
      "epoch": 0.8787878787878788,
      "grad_norm": 0.8011804891864853,
      "learning_rate": 1.9971221996878395e-05,
      "loss": 0.7101,
      "step": 29
    },
    {
      "epoch": 0.9090909090909091,
      "grad_norm": 0.7777553110619772,
      "learning_rate": 1.9958568425315316e-05,
      "loss": 0.6925,
      "step": 30
    },
    {
      "epoch": 0.9393939393939394,
      "grad_norm": 0.8604670869538524,
      "learning_rate": 1.9943621095573588e-05,
      "loss": 0.6904,
      "step": 31
    },
    {
      "epoch": 0.9696969696969697,
      "grad_norm": 0.8621398383957887,
      "learning_rate": 1.9926383450473344e-05,
      "loss": 0.6751,
      "step": 32
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.7853612174091144,
      "learning_rate": 1.9906859460363307e-05,
      "loss": 0.6868,
      "step": 33
    },
    {
      "epoch": 1.0303030303030303,
      "grad_norm": 0.8046166369612455,
      "learning_rate": 1.9885053622206305e-05,
      "loss": 0.6497,
      "step": 34
    },
    {
      "epoch": 1.0606060606060606,
      "grad_norm": 0.6604470778365054,
      "learning_rate": 1.986097095854347e-05,
      "loss": 0.6293,
      "step": 35
    },
    {
      "epoch": 1.0909090909090908,
      "grad_norm": 0.6294942264260563,
      "learning_rate": 1.9834617016337424e-05,
      "loss": 0.6486,
      "step": 36
    },
    {
      "epoch": 1.121212121212121,
      "grad_norm": 0.5787782281521954,
      "learning_rate": 1.9805997865694616e-05,
      "loss": 0.6449,
      "step": 37
    },
    {
      "epoch": 1.1515151515151516,
      "grad_norm": 0.6035558760505482,
      "learning_rate": 1.9775120098467212e-05,
      "loss": 0.6322,
      "step": 38
    },
    {
      "epoch": 1.1818181818181819,
      "grad_norm": 0.5880242754158563,
      "learning_rate": 1.9741990826734793e-05,
      "loss": 0.6351,
      "step": 39
    },
    {
      "epoch": 1.2121212121212122,
      "grad_norm": 0.5329493991869941,
      "learning_rate": 1.970661768116622e-05,
      "loss": 0.6373,
      "step": 40
    },
    {
      "epoch": 1.2424242424242424,
      "grad_norm": 0.5487060567189378,
      "learning_rate": 1.9669008809262064e-05,
      "loss": 0.6254,
      "step": 41
    },
    {
      "epoch": 1.2727272727272727,
      "grad_norm": 0.577330341379163,
      "learning_rate": 1.9629172873477995e-05,
      "loss": 0.6145,
      "step": 42
    },
    {
      "epoch": 1.303030303030303,
      "grad_norm": 0.548723052022649,
      "learning_rate": 1.9587119049229558e-05,
      "loss": 0.631,
      "step": 43
    },
    {
      "epoch": 1.3333333333333333,
      "grad_norm": 0.4741961315186296,
      "learning_rate": 1.954285702277879e-05,
      "loss": 0.6315,
      "step": 44
    },
    {
      "epoch": 1.3636363636363638,
      "grad_norm": 0.5322002017205556,
      "learning_rate": 1.9496396989003195e-05,
      "loss": 0.6315,
      "step": 45
    },
    {
      "epoch": 1.393939393939394,
      "grad_norm": 0.5448230086028936,
      "learning_rate": 1.944774964904754e-05,
      "loss": 0.6338,
      "step": 46
    },
    {
      "epoch": 1.4242424242424243,
      "grad_norm": 0.5721457532205242,
      "learning_rate": 1.9396926207859085e-05,
      "loss": 0.613,
      "step": 47
    },
    {
      "epoch": 1.4545454545454546,
      "grad_norm": 0.5011929282794981,
      "learning_rate": 1.9343938371606714e-05,
      "loss": 0.6268,
      "step": 48
    },
    {
      "epoch": 1.4848484848484849,
      "grad_norm": 0.6888065111036699,
      "learning_rate": 1.9288798344984673e-05,
      "loss": 0.6102,
      "step": 49
    },
    {
      "epoch": 1.5151515151515151,
      "grad_norm": 0.5819664946902224,
      "learning_rate": 1.9231518828401458e-05,
      "loss": 0.6165,
      "step": 50
    },
    {
      "epoch": 1.5454545454545454,
      "grad_norm": 0.4977271048283543,
      "learning_rate": 1.917211301505453e-05,
      "loss": 0.6135,
      "step": 51
    },
    {
      "epoch": 1.5757575757575757,
      "grad_norm": 0.5483360027543468,
      "learning_rate": 1.911059458789152e-05,
      "loss": 0.6035,
      "step": 52
    },
    {
      "epoch": 1.606060606060606,
      "grad_norm": 0.5938360983425195,
      "learning_rate": 1.9046977716458627e-05,
      "loss": 0.6082,
      "step": 53
    },
    {
      "epoch": 1.6363636363636362,
      "grad_norm": 0.639778467707558,
      "learning_rate": 1.8981277053636963e-05,
      "loss": 0.6192,
      "step": 54
    },
    {
      "epoch": 1.6666666666666665,
      "grad_norm": 0.6299987538556902,
      "learning_rate": 1.891350773226754e-05,
      "loss": 0.6187,
      "step": 55
    },
    {
      "epoch": 1.696969696969697,
      "grad_norm": 0.7462238042791106,
      "learning_rate": 1.8843685361665724e-05,
      "loss": 0.6339,
      "step": 56
    },
    {
      "epoch": 1.7272727272727273,
      "grad_norm": 0.4587409403241806,
      "learning_rate": 1.8771826024025944e-05,
      "loss": 0.6019,
      "step": 57
    },
    {
      "epoch": 1.7575757575757576,
      "grad_norm": 0.6309678302007972,
      "learning_rate": 1.8697946270717468e-05,
      "loss": 0.6206,
      "step": 58
    },
    {
      "epoch": 1.7878787878787878,
      "grad_norm": 0.5964975297698822,
      "learning_rate": 1.8622063118472135e-05,
      "loss": 0.6138,
      "step": 59
    },
    {
      "epoch": 1.8181818181818183,
      "grad_norm": 0.5091224937984986,
      "learning_rate": 1.8544194045464888e-05,
      "loss": 0.6237,
      "step": 60
    },
    {
      "epoch": 1.8484848484848486,
      "grad_norm": 0.43784547796166895,
      "learning_rate": 1.8464356987288012e-05,
      "loss": 0.6072,
      "step": 61
    },
    {
      "epoch": 1.878787878787879,
      "grad_norm": 0.47486738520964883,
      "learning_rate": 1.8382570332820045e-05,
      "loss": 0.6039,
      "step": 62
    },
    {
      "epoch": 1.9090909090909092,
      "grad_norm": 0.44865590799121363,
      "learning_rate": 1.8298852919990254e-05,
      "loss": 0.6092,
      "step": 63
    },
    {
      "epoch": 1.9393939393939394,
      "grad_norm": 0.5375632964773143,
      "learning_rate": 1.821322403143969e-05,
      "loss": 0.5939,
      "step": 64
    },
    {
      "epoch": 1.9696969696969697,
      "grad_norm": 0.608651292228407,
      "learning_rate": 1.812570339007983e-05,
      "loss": 0.6129,
      "step": 65
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.44024856323173045,
      "learning_rate": 1.8036311154549783e-05,
      "loss": 0.5893,
      "step": 66
    },
    {
      "epoch": 2.0303030303030303,
      "grad_norm": 0.6980844762428723,
      "learning_rate": 1.7945067914573147e-05,
      "loss": 0.5474,
      "step": 67
    },
    {
      "epoch": 2.0606060606060606,
      "grad_norm": 0.5182683022073494,
      "learning_rate": 1.7851994686215592e-05,
      "loss": 0.5725,
      "step": 68
    },
    {
      "epoch": 2.090909090909091,
      "grad_norm": 0.5404396715701119,
      "learning_rate": 1.77571129070442e-05,
      "loss": 0.5526,
      "step": 69
    },
    {
      "epoch": 2.121212121212121,
      "grad_norm": 0.5305063939153388,
      "learning_rate": 1.766044443118978e-05,
      "loss": 0.5536,
      "step": 70
    },
    {
      "epoch": 2.1515151515151514,
      "grad_norm": 0.5644734582249971,
      "learning_rate": 1.7562011524313187e-05,
      "loss": 0.5355,
      "step": 71
    },
    {
      "epoch": 2.1818181818181817,
      "grad_norm": 0.5202130970874336,
      "learning_rate": 1.7461836858476858e-05,
      "loss": 0.5384,
      "step": 72
    },
    {
      "epoch": 2.212121212121212,
      "grad_norm": 0.5357956217064614,
      "learning_rate": 1.7359943506922775e-05,
      "loss": 0.5442,
      "step": 73
    },
    {
      "epoch": 2.242424242424242,
      "grad_norm": 0.565767866530521,
      "learning_rate": 1.725635493875799e-05,
      "loss": 0.5276,
      "step": 74
    },
    {
      "epoch": 2.2727272727272725,
      "grad_norm": 0.5149462165435431,
      "learning_rate": 1.7151095013548996e-05,
      "loss": 0.5282,
      "step": 75
    },
    {
      "epoch": 2.303030303030303,
      "grad_norm": 0.6836826507439627,
      "learning_rate": 1.7044187975826126e-05,
      "loss": 0.5528,
      "step": 76
    },
    {
      "epoch": 2.3333333333333335,
      "grad_norm": 0.4830928504569663,
      "learning_rate": 1.693565844949933e-05,
      "loss": 0.5341,
      "step": 77
    },
    {
      "epoch": 2.3636363636363638,
      "grad_norm": 0.5788000592263608,
      "learning_rate": 1.6825531432186545e-05,
      "loss": 0.5385,
      "step": 78
    },
    {
      "epoch": 2.393939393939394,
      "grad_norm": 0.5726173587627938,
      "learning_rate": 1.671383228945597e-05,
      "loss": 0.5422,
      "step": 79
    },
    {
      "epoch": 2.4242424242424243,
      "grad_norm": 0.6643959541973968,
      "learning_rate": 1.6600586748983642e-05,
      "loss": 0.5548,
      "step": 80
    },
    {
      "epoch": 2.4545454545454546,
      "grad_norm": 0.48281206376363367,
      "learning_rate": 1.648582089462756e-05,
      "loss": 0.5416,
      "step": 81
    },
    {
      "epoch": 2.484848484848485,
      "grad_norm": 0.6009102492415257,
      "learning_rate": 1.6369561160419783e-05,
      "loss": 0.5322,
      "step": 82
    },
    {
      "epoch": 2.515151515151515,
      "grad_norm": 0.4893746393385918,
      "learning_rate": 1.625183432447789e-05,
      "loss": 0.5486,
      "step": 83
    },
    {
      "epoch": 2.5454545454545454,
      "grad_norm": 0.509521231101079,
      "learning_rate": 1.6132667502837164e-05,
      "loss": 0.5324,
      "step": 84
    },
    {
      "epoch": 2.5757575757575757,
      "grad_norm": 0.545869239651775,
      "learning_rate": 1.6012088143204953e-05,
      "loss": 0.5347,
      "step": 85
    },
    {
      "epoch": 2.606060606060606,
      "grad_norm": 0.5248012196294125,
      "learning_rate": 1.589012401863864e-05,
      "loss": 0.5474,
      "step": 86
    },
    {
      "epoch": 2.6363636363636362,
      "grad_norm": 0.5691007796569792,
      "learning_rate": 1.5766803221148676e-05,
      "loss": 0.5328,
      "step": 87
    },
    {
      "epoch": 2.6666666666666665,
      "grad_norm": 0.48088594253053496,
      "learning_rate": 1.5642154155228124e-05,
      "loss": 0.5281,
      "step": 88
    },
    {
      "epoch": 2.6969696969696972,
      "grad_norm": 0.4847231502014893,
      "learning_rate": 1.5516205531310272e-05,
      "loss": 0.5366,
      "step": 89
    },
    {
      "epoch": 2.7272727272727275,
      "grad_norm": 0.4579402446027877,
      "learning_rate": 1.538898635915576e-05,
      "loss": 0.5356,
      "step": 90
    },
    {
      "epoch": 2.757575757575758,
      "grad_norm": 0.5022857211635094,
      "learning_rate": 1.526052594117071e-05,
      "loss": 0.5257,
      "step": 91
    },
    {
      "epoch": 2.787878787878788,
      "grad_norm": 0.5197458111165683,
      "learning_rate": 1.513085386565758e-05,
      "loss": 0.5441,
      "step": 92
    },
    {
      "epoch": 2.8181818181818183,
      "grad_norm": 0.4443806849341786,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.5375,
      "step": 93
    },
    {
      "epoch": 2.8484848484848486,
      "grad_norm": 0.4415612167940715,
      "learning_rate": 1.4867994483783485e-05,
      "loss": 0.5198,
      "step": 94
    },
    {
      "epoch": 2.878787878787879,
      "grad_norm": 0.4729466207945027,
      "learning_rate": 1.4734867721853341e-05,
      "loss": 0.5327,
      "step": 95
    },
    {
      "epoch": 2.909090909090909,
      "grad_norm": 0.3927504975139151,
      "learning_rate": 1.4600650377311523e-05,
      "loss": 0.5323,
      "step": 96
    },
    {
      "epoch": 2.9393939393939394,
      "grad_norm": 0.4932818281771704,
      "learning_rate": 1.4465373364454001e-05,
      "loss": 0.54,
      "step": 97
    },
    {
      "epoch": 2.9696969696969697,
      "grad_norm": 0.395042020106255,
      "learning_rate": 1.4329067841650274e-05,
      "loss": 0.5499,
      "step": 98
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.4842433679584853,
      "learning_rate": 1.4191765204166643e-05,
      "loss": 0.5219,
      "step": 99
    },
    {
      "epoch": 3.0303030303030303,
      "grad_norm": 0.517778147081366,
      "learning_rate": 1.4053497076934948e-05,
      "loss": 0.4792,
      "step": 100
    },
    {
      "epoch": 3.0606060606060606,
      "grad_norm": 0.49787957784324754,
      "learning_rate": 1.3914295307268396e-05,
      "loss": 0.4846,
      "step": 101
    },
    {
      "epoch": 3.090909090909091,
      "grad_norm": 0.5333577077714253,
      "learning_rate": 1.3774191957526144e-05,
      "loss": 0.4834,
      "step": 102
    },
    {
      "epoch": 3.121212121212121,
      "grad_norm": 0.5553372294102208,
      "learning_rate": 1.3633219297728415e-05,
      "loss": 0.4893,
      "step": 103
    },
    {
      "epoch": 3.1515151515151514,
      "grad_norm": 0.5225451939758264,
      "learning_rate": 1.3491409798123687e-05,
      "loss": 0.4755,
      "step": 104
    },
    {
      "epoch": 3.1818181818181817,
      "grad_norm": 0.5147983075558922,
      "learning_rate": 1.3348796121709862e-05,
      "loss": 0.4688,
      "step": 105
    },
    {
      "epoch": 3.212121212121212,
      "grad_norm": 0.596538636274444,
      "learning_rate": 1.3205411116710973e-05,
      "loss": 0.482,
      "step": 106
    },
    {
      "epoch": 3.242424242424242,
      "grad_norm": 0.5648728417875709,
      "learning_rate": 1.3061287809011243e-05,
      "loss": 0.4887,
      "step": 107
    },
    {
      "epoch": 3.2727272727272725,
      "grad_norm": 0.47558400405296414,
      "learning_rate": 1.291645939454825e-05,
      "loss": 0.4782,
      "step": 108
    },
    {
      "epoch": 3.303030303030303,
      "grad_norm": 0.5186604404259834,
      "learning_rate": 1.277095923166689e-05,
      "loss": 0.4662,
      "step": 109
    },
    {
      "epoch": 3.3333333333333335,
      "grad_norm": 0.5236731465454446,
      "learning_rate": 1.2624820833435939e-05,
      "loss": 0.4643,
      "step": 110
    },
    {
      "epoch": 3.3636363636363638,
      "grad_norm": 0.4607134576648133,
      "learning_rate": 1.2478077859929e-05,
      "loss": 0.4704,
      "step": 111
    },
    {
      "epoch": 3.393939393939394,
      "grad_norm": 0.5092626471840158,
      "learning_rate": 1.2330764110471567e-05,
      "loss": 0.4814,
      "step": 112
    },
    {
      "epoch": 3.4242424242424243,
      "grad_norm": 0.47207207280474506,
      "learning_rate": 1.2182913515856016e-05,
      "loss": 0.4565,
      "step": 113
    },
    {
      "epoch": 3.4545454545454546,
      "grad_norm": 0.4374242509606414,
      "learning_rate": 1.2034560130526341e-05,
      "loss": 0.4731,
      "step": 114
    },
    {
      "epoch": 3.484848484848485,
      "grad_norm": 0.4730060091153659,
      "learning_rate": 1.1885738124734359e-05,
      "loss": 0.4683,
      "step": 115
    },
    {
      "epoch": 3.515151515151515,
      "grad_norm": 0.4221262453746083,
      "learning_rate": 1.1736481776669307e-05,
      "loss": 0.4663,
      "step": 116
    },
    {
      "epoch": 3.5454545454545454,
      "grad_norm": 0.5101200302479499,
      "learning_rate": 1.1586825464562515e-05,
      "loss": 0.4648,
      "step": 117
    },
    {
      "epoch": 3.5757575757575757,
      "grad_norm": 0.5103410919800033,
      "learning_rate": 1.1436803658769082e-05,
      "loss": 0.4779,
      "step": 118
    },
    {
      "epoch": 3.606060606060606,
      "grad_norm": 0.4450626730517202,
      "learning_rate": 1.1286450913828313e-05,
      "loss": 0.4752,
      "step": 119
    },
    {
      "epoch": 3.6363636363636362,
      "grad_norm": 0.4968592992971222,
      "learning_rate": 1.113580186050475e-05,
      "loss": 0.4741,
      "step": 120
    },
    {
      "epoch": 3.6666666666666665,
      "grad_norm": 0.4011648197454992,
      "learning_rate": 1.0984891197811686e-05,
      "loss": 0.4829,
      "step": 121
    },
    {
      "epoch": 3.6969696969696972,
      "grad_norm": 0.5273690586575496,
      "learning_rate": 1.0833753685018935e-05,
      "loss": 0.4693,
      "step": 122
    },
    {
      "epoch": 3.7272727272727275,
      "grad_norm": 0.43414871426684687,
      "learning_rate": 1.0682424133646712e-05,
      "loss": 0.4684,
      "step": 123
    },
    {
      "epoch": 3.757575757575758,
      "grad_norm": 0.4014074263052897,
      "learning_rate": 1.0530937399447496e-05,
      "loss": 0.4812,
      "step": 124
    },
    {
      "epoch": 3.787878787878788,
      "grad_norm": 0.4547211648900433,
      "learning_rate": 1.0379328374377715e-05,
      "loss": 0.4629,
      "step": 125
    },
    {
      "epoch": 3.8181818181818183,
      "grad_norm": 0.3961979664531267,
      "learning_rate": 1.0227631978561057e-05,
      "loss": 0.475,
      "step": 126
    },
    {
      "epoch": 3.8484848484848486,
      "grad_norm": 0.43542193984738736,
      "learning_rate": 1.0075883152245334e-05,
      "loss": 0.4597,
      "step": 127
    },
    {
      "epoch": 3.878787878787879,
      "grad_norm": 0.40945658123125683,
      "learning_rate": 9.92411684775467e-06,
      "loss": 0.4751,
      "step": 128
    },
    {
      "epoch": 3.909090909090909,
      "grad_norm": 0.37125307540613495,
      "learning_rate": 9.772368021438943e-06,
      "loss": 0.4788,
      "step": 129
    },
    {
      "epoch": 3.9393939393939394,
      "grad_norm": 0.43624019011061105,
      "learning_rate": 9.620671625622287e-06,
      "loss": 0.4658,
      "step": 130
    },
    {
      "epoch": 3.9696969696969697,
      "grad_norm": 0.38438890329969516,
      "learning_rate": 9.469062600552509e-06,
      "loss": 0.4693,
      "step": 131
    },
    {
      "epoch": 4.0,
      "grad_norm": 0.46071327617582064,
      "learning_rate": 9.317575866353293e-06,
      "loss": 0.4703,
      "step": 132
    },
    {
      "epoch": 4.03030303030303,
      "grad_norm": 0.549915464553488,
      "learning_rate": 9.166246314981066e-06,
      "loss": 0.4316,
      "step": 133
    },
    {
      "epoch": 4.0606060606060606,
      "grad_norm": 0.49705236619568727,
      "learning_rate": 9.015108802188314e-06,
      "loss": 0.4099,
      "step": 134
    },
    {
      "epoch": 4.090909090909091,
      "grad_norm": 0.4289325156966301,
      "learning_rate": 8.86419813949525e-06,
      "loss": 0.4261,
      "step": 135
    },
    {
      "epoch": 4.121212121212121,
      "grad_norm": 0.5693436632007958,
      "learning_rate": 8.71354908617169e-06,
      "loss": 0.4191,
      "step": 136
    },
    {
      "epoch": 4.151515151515151,
      "grad_norm": 0.5387261802907803,
      "learning_rate": 8.56319634123092e-06,
      "loss": 0.422,
      "step": 137
    },
    {
      "epoch": 4.181818181818182,
      "grad_norm": 0.4309658522579619,
      "learning_rate": 8.413174535437486e-06,
      "loss": 0.4106,
      "step": 138
    },
    {
      "epoch": 4.212121212121212,
      "grad_norm": 0.5246341763714903,
      "learning_rate": 8.263518223330698e-06,
      "loss": 0.4288,
      "step": 139
    },
    {
      "epoch": 4.242424242424242,
      "grad_norm": 0.44678572175146675,
      "learning_rate": 8.114261875265643e-06,
      "loss": 0.4174,
      "step": 140
    },
    {
      "epoch": 4.2727272727272725,
      "grad_norm": 0.41094038943781347,
      "learning_rate": 7.965439869473664e-06,
      "loss": 0.4077,
      "step": 141
    },
    {
      "epoch": 4.303030303030303,
      "grad_norm": 0.40324815976350814,
      "learning_rate": 7.817086484143987e-06,
      "loss": 0.4168,
      "step": 142
    },
    {
      "epoch": 4.333333333333333,
      "grad_norm": 0.4677095167961872,
      "learning_rate": 7.669235889528436e-06,
      "loss": 0.4228,
      "step": 143
    },
    {
      "epoch": 4.363636363636363,
      "grad_norm": 0.4231620470214181,
      "learning_rate": 7.521922140071003e-06,
      "loss": 0.406,
      "step": 144
    },
    {
      "epoch": 4.393939393939394,
      "grad_norm": 0.38472876180976645,
      "learning_rate": 7.375179166564062e-06,
      "loss": 0.419,
      "step": 145
    },
    {
      "epoch": 4.424242424242424,
      "grad_norm": 0.39815218699138305,
      "learning_rate": 7.2290407683331154e-06,
      "loss": 0.4217,
      "step": 146
    },
    {
      "epoch": 4.454545454545454,
      "grad_norm": 0.38973334842621504,
      "learning_rate": 7.0835406054517505e-06,
      "loss": 0.407,
      "step": 147
    },
    {
      "epoch": 4.484848484848484,
      "grad_norm": 0.357129065814448,
      "learning_rate": 6.93871219098876e-06,
      "loss": 0.4161,
      "step": 148
    },
    {
      "epoch": 4.515151515151516,
      "grad_norm": 0.37574480732292936,
      "learning_rate": 6.79458888328903e-06,
      "loss": 0.4127,
      "step": 149
    },
    {
      "epoch": 4.545454545454545,
      "grad_norm": 0.4304590573047278,
      "learning_rate": 6.651203878290139e-06,
      "loss": 0.4136,
      "step": 150
    },
    {
      "epoch": 4.575757575757576,
      "grad_norm": 0.37455970862907545,
      "learning_rate": 6.508590201876317e-06,
      "loss": 0.4126,
      "step": 151
    },
    {
      "epoch": 4.606060606060606,
      "grad_norm": 0.35140746880501045,
      "learning_rate": 6.366780702271589e-06,
      "loss": 0.402,
      "step": 152
    },
    {
      "epoch": 4.636363636363637,
      "grad_norm": 0.35468098669148884,
      "learning_rate": 6.225808042473857e-06,
      "loss": 0.4148,
      "step": 153
    },
    {
      "epoch": 4.666666666666667,
      "grad_norm": 0.35870638919737535,
      "learning_rate": 6.085704692731609e-06,
      "loss": 0.4173,
      "step": 154
    },
    {
      "epoch": 4.696969696969697,
      "grad_norm": 0.3404383800514381,
      "learning_rate": 5.946502923065054e-06,
      "loss": 0.4148,
      "step": 155
    },
    {
      "epoch": 4.7272727272727275,
      "grad_norm": 0.3414680860961153,
      "learning_rate": 5.8082347958333625e-06,
      "loss": 0.4187,
      "step": 156
    },
    {
      "epoch": 4.757575757575758,
      "grad_norm": 0.3612773359243806,
      "learning_rate": 5.670932158349732e-06,
      "loss": 0.4156,
      "step": 157
    },
    {
      "epoch": 4.787878787878788,
      "grad_norm": 0.3455389798706968,
      "learning_rate": 5.534626635546e-06,
      "loss": 0.4121,
      "step": 158
    },
    {
      "epoch": 4.818181818181818,
      "grad_norm": 0.3469521902127527,
      "learning_rate": 5.399349622688479e-06,
      "loss": 0.42,
      "step": 159
    },
    {
      "epoch": 4.848484848484849,
      "grad_norm": 0.3287731553618517,
      "learning_rate": 5.2651322781466606e-06,
      "loss": 0.4236,
      "step": 160
    },
    {
      "epoch": 4.878787878787879,
      "grad_norm": 0.3271765785486276,
      "learning_rate": 5.132005516216512e-06,
      "loss": 0.4135,
      "step": 161
    },
    {
      "epoch": 4.909090909090909,
      "grad_norm": 0.31239522699154915,
      "learning_rate": 5.000000000000003e-06,
      "loss": 0.4097,
      "step": 162
    },
    {
      "epoch": 4.9393939393939394,
      "grad_norm": 0.3156681272030559,
      "learning_rate": 4.869146134342426e-06,
      "loss": 0.3954,
      "step": 163
    },
    {
      "epoch": 4.96969696969697,
      "grad_norm": 0.36382188933769277,
      "learning_rate": 4.739474058829288e-06,
      "loss": 0.4229,
      "step": 164
    },
    {
      "epoch": 5.0,
      "grad_norm": 0.34045399086841477,
      "learning_rate": 4.611013640844245e-06,
      "loss": 0.4149,
      "step": 165
    },
    {
      "epoch": 5.03030303030303,
      "grad_norm": 0.5044756046722096,
      "learning_rate": 4.483794468689728e-06,
      "loss": 0.3844,
      "step": 166
    },
    {
      "epoch": 5.0606060606060606,
      "grad_norm": 0.41237791230202236,
      "learning_rate": 4.357845844771881e-06,
      "loss": 0.3671,
      "step": 167
    },
    {
      "epoch": 5.090909090909091,
      "grad_norm": 0.34955916182422403,
      "learning_rate": 4.2331967788513295e-06,
      "loss": 0.373,
      "step": 168
    },
    {
      "epoch": 5.121212121212121,
      "grad_norm": 0.4438250960546701,
      "learning_rate": 4.109875981361363e-06,
      "loss": 0.3564,
      "step": 169
    },
    {
      "epoch": 5.151515151515151,
      "grad_norm": 0.5378833215453719,
      "learning_rate": 3.987911856795047e-06,
      "loss": 0.3675,
      "step": 170
    },
    {
      "epoch": 5.181818181818182,
      "grad_norm": 0.42285744976151174,
      "learning_rate": 3.867332497162836e-06,
      "loss": 0.3746,
      "step": 171
    },
    {
      "epoch": 5.212121212121212,
      "grad_norm": 0.3830432468541377,
      "learning_rate": 3.748165675522113e-06,
      "loss": 0.3703,
      "step": 172
    },
    {
      "epoch": 5.242424242424242,
      "grad_norm": 0.3858055962114919,
      "learning_rate": 3.630438839580217e-06,
      "loss": 0.3783,
      "step": 173
    },
    {
      "epoch": 5.2727272727272725,
      "grad_norm": 0.3917855532263057,
      "learning_rate": 3.5141791053724405e-06,
      "loss": 0.3592,
      "step": 174
    },
    {
      "epoch": 5.303030303030303,
      "grad_norm": 0.419316148500706,
      "learning_rate": 3.399413251016359e-06,
      "loss": 0.3706,
      "step": 175
    },
    {
      "epoch": 5.333333333333333,
      "grad_norm": 0.38280771968727684,
      "learning_rate": 3.2861677105440335e-06,
      "loss": 0.369,
      "step": 176
    },
    {
      "epoch": 5.363636363636363,
      "grad_norm": 0.37161552848703766,
      "learning_rate": 3.174468567813461e-06,
      "loss": 0.3683,
      "step": 177
    },
    {
      "epoch": 5.393939393939394,
      "grad_norm": 0.3839626609743638,
      "learning_rate": 3.0643415505006733e-06,
      "loss": 0.373,
      "step": 178
    },
    {
      "epoch": 5.424242424242424,
      "grad_norm": 0.3737777443133059,
      "learning_rate": 2.9558120241738786e-06,
      "loss": 0.3751,
      "step": 179
    },
    {
      "epoch": 5.454545454545454,
      "grad_norm": 0.3699742548934759,
      "learning_rate": 2.8489049864510053e-06,
      "loss": 0.3705,
      "step": 180
    },
    {
      "epoch": 5.484848484848484,
      "grad_norm": 0.3621054260712193,
      "learning_rate": 2.7436450612420098e-06,
      "loss": 0.3692,
      "step": 181
    },
    {
      "epoch": 5.515151515151516,
      "grad_norm": 0.36441907661210643,
      "learning_rate": 2.640056493077231e-06,
      "loss": 0.3767,
      "step": 182
    },
    {
      "epoch": 5.545454545454545,
      "grad_norm": 0.34402215120323093,
      "learning_rate": 2.5381631415231455e-06,
      "loss": 0.3759,
      "step": 183
    },
    {
      "epoch": 5.575757575757576,
      "grad_norm": 0.32795833240845856,
      "learning_rate": 2.4379884756868167e-06,
      "loss": 0.3789,
      "step": 184
    },
    {
      "epoch": 5.606060606060606,
      "grad_norm": 0.32215801470209426,
      "learning_rate": 2.339555568810221e-06,
      "loss": 0.3711,
      "step": 185
    },
    {
      "epoch": 5.636363636363637,
      "grad_norm": 0.33932527796232764,
      "learning_rate": 2.2428870929558012e-06,
      "loss": 0.3806,
      "step": 186
    },
    {
      "epoch": 5.666666666666667,
      "grad_norm": 0.33010171544059436,
      "learning_rate": 2.1480053137844115e-06,
      "loss": 0.3793,
      "step": 187
    },
    {
      "epoch": 5.696969696969697,
      "grad_norm": 0.3347552964327516,
      "learning_rate": 2.054932085426856e-06,
      "loss": 0.388,
      "step": 188
    },
    {
      "epoch": 5.7272727272727275,
      "grad_norm": 0.30073887916372427,
      "learning_rate": 1.963688845450218e-06,
      "loss": 0.3744,
      "step": 189
    },
    {
      "epoch": 5.757575757575758,
      "grad_norm": 0.3131014609633843,
      "learning_rate": 1.8742966099201699e-06,
      "loss": 0.3641,
      "step": 190
    },
    {
      "epoch": 5.787878787878788,
      "grad_norm": 0.3325293489769436,
      "learning_rate": 1.7867759685603115e-06,
      "loss": 0.3624,
      "step": 191
    },
    {
      "epoch": 5.818181818181818,
      "grad_norm": 0.30487109929123163,
      "learning_rate": 1.7011470800097496e-06,
      "loss": 0.3752,
      "step": 192
    },
    {
      "epoch": 5.848484848484849,
      "grad_norm": 0.2984494414418286,
      "learning_rate": 1.6174296671799571e-06,
      "loss": 0.3569,
      "step": 193
    },
    {
      "epoch": 5.878787878787879,
      "grad_norm": 0.30215385875684886,
      "learning_rate": 1.5356430127119915e-06,
      "loss": 0.3644,
      "step": 194
    },
    {
      "epoch": 5.909090909090909,
      "grad_norm": 0.2944822474056789,
      "learning_rate": 1.4558059545351144e-06,
      "loss": 0.3733,
      "step": 195
    },
    {
      "epoch": 5.9393939393939394,
      "grad_norm": 0.2901537050818472,
      "learning_rate": 1.3779368815278648e-06,
      "loss": 0.3736,
      "step": 196
    },
    {
      "epoch": 5.96969696969697,
      "grad_norm": 0.3021704491376866,
      "learning_rate": 1.302053729282533e-06,
      "loss": 0.3587,
      "step": 197
    },
    {
      "epoch": 6.0,
      "grad_norm": 0.33659568526850164,
      "learning_rate": 1.2281739759740575e-06,
      "loss": 0.374,
      "step": 198
    },
    {
      "epoch": 6.03030303030303,
      "grad_norm": 0.3961446442740241,
      "learning_rate": 1.156314638334277e-06,
      "loss": 0.3456,
      "step": 199
    },
    {
      "epoch": 6.0606060606060606,
      "grad_norm": 0.3578301159030496,
      "learning_rate": 1.086492267732462e-06,
      "loss": 0.3346,
      "step": 200
    },
    {
      "epoch": 6.090909090909091,
      "grad_norm": 0.32368117334038604,
      "learning_rate": 1.01872294636304e-06,
      "loss": 0.3551,
      "step": 201
    },
    {
      "epoch": 6.121212121212121,
      "grad_norm": 0.30832624564558136,
      "learning_rate": 9.530222835413739e-07,
      "loss": 0.3485,
      "step": 202
    },
    {
      "epoch": 6.151515151515151,
      "grad_norm": 0.3084039883834137,
      "learning_rate": 8.894054121084839e-07,
      "loss": 0.3435,
      "step": 203
    },
    {
      "epoch": 6.181818181818182,
      "grad_norm": 0.3022004078228386,
      "learning_rate": 8.278869849454718e-07,
      "loss": 0.3489,
      "step": 204
    },
    {
      "epoch": 6.212121212121212,
      "grad_norm": 0.3351879674470543,
      "learning_rate": 7.684811715985429e-07,
      "loss": 0.3423,
      "step": 205
    },
    {
      "epoch": 6.242424242424242,
      "grad_norm": 0.3223254087806397,
      "learning_rate": 7.1120165501533e-07,
      "loss": 0.3576,
      "step": 206
    },
    {
      "epoch": 6.2727272727272725,
      "grad_norm": 0.3249796694501032,
      "learning_rate": 6.560616283932897e-07,
      "loss": 0.3596,
      "step": 207
    },
    {
      "epoch": 6.303030303030303,
      "grad_norm": 0.3383471916015826,
      "learning_rate": 6.030737921409169e-07,
      "loss": 0.3579,
      "step": 208
    },
    {
      "epoch": 6.333333333333333,
      "grad_norm": 0.3210379088186702,
      "learning_rate": 5.522503509524591e-07,
      "loss": 0.3473,
      "step": 209
    },
    {
      "epoch": 6.363636363636363,
      "grad_norm": 0.32482561399174104,
      "learning_rate": 5.036030109968082e-07,
      "loss": 0.3467,
      "step": 210
    },
    {
      "epoch": 6.393939393939394,
      "grad_norm": 0.29307954683423065,
      "learning_rate": 4.5714297722121105e-07,
      "loss": 0.349,
      "step": 211
    },
    {
      "epoch": 6.424242424242424,
      "grad_norm": 0.28564159916131726,
      "learning_rate": 4.128809507704445e-07,
      "loss": 0.3458,
      "step": 212
    },
    {
      "epoch": 6.454545454545454,
      "grad_norm": 0.2794555238624641,
      "learning_rate": 3.708271265220087e-07,
      "loss": 0.3478,
      "step": 213
    },
    {
      "epoch": 6.484848484848484,
      "grad_norm": 0.2739868217444457,
      "learning_rate": 3.309911907379393e-07,
      "loss": 0.3588,
      "step": 214
    },
    {
      "epoch": 6.515151515151516,
      "grad_norm": 0.29345881187170175,
      "learning_rate": 2.9338231883378365e-07,
      "loss": 0.3528,
      "step": 215
    },
    {
      "epoch": 6.545454545454545,
      "grad_norm": 0.30010189778050617,
      "learning_rate": 2.5800917326521013e-07,
      "loss": 0.3544,
      "step": 216
    },
    {
      "epoch": 6.575757575757576,
      "grad_norm": 0.28752501262154156,
      "learning_rate": 2.248799015327907e-07,
      "loss": 0.3378,
      "step": 217
    },
    {
      "epoch": 6.606060606060606,
      "grad_norm": 0.3072149247627371,
      "learning_rate": 1.9400213430538773e-07,
      "loss": 0.336,
      "step": 218
    },
    {
      "epoch": 6.636363636363637,
      "grad_norm": 0.2713972189812151,
      "learning_rate": 1.6538298366257975e-07,
      "loss": 0.3443,
      "step": 219
    },
    {
      "epoch": 6.666666666666667,
      "grad_norm": 0.2954197120252145,
      "learning_rate": 1.3902904145653094e-07,
      "loss": 0.3542,
      "step": 220
    },
    {
      "epoch": 6.696969696969697,
      "grad_norm": 0.28950931222260234,
      "learning_rate": 1.1494637779369766e-07,
      "loss": 0.3524,
      "step": 221
    },
    {
      "epoch": 6.7272727272727275,
      "grad_norm": 0.2751344749931131,
      "learning_rate": 9.314053963669245e-08,
      "loss": 0.3538,
      "step": 222
    },
    {
      "epoch": 6.757575757575758,
      "grad_norm": 0.30131842899827077,
      "learning_rate": 7.361654952665608e-08,
      "loss": 0.335,
      "step": 223
    },
    {
      "epoch": 6.787878787878788,
      "grad_norm": 0.27821946794035596,
      "learning_rate": 5.637890442641403e-08,
      "loss": 0.3757,
      "step": 224
    },
    {
      "epoch": 6.818181818181818,
      "grad_norm": 0.27764319306193624,
      "learning_rate": 4.143157468468717e-08,
      "loss": 0.3583,
      "step": 225
    },
    {
      "epoch": 6.848484848484849,
      "grad_norm": 0.3249780204193289,
      "learning_rate": 2.8778003121607834e-08,
      "loss": 0.3441,
      "step": 226
    },
    {
      "epoch": 6.878787878787879,
      "grad_norm": 0.2852587967210579,
      "learning_rate": 1.8421104235727406e-08,
      "loss": 0.3502,
      "step": 227
    },
    {
      "epoch": 6.909090909090909,
      "grad_norm": 0.28096930566095124,
      "learning_rate": 1.0363263532724433e-08,
      "loss": 0.3478,
      "step": 228
    },
    {
      "epoch": 6.9393939393939394,
      "grad_norm": 0.2910326513319612,
      "learning_rate": 4.606336975948589e-09,
      "loss": 0.3506,
      "step": 229
    },
    {
      "epoch": 6.96969696969697,
      "grad_norm": 0.2693333849629141,
      "learning_rate": 1.1516505589381777e-09,
      "loss": 0.3536,
      "step": 230
    },
    {
      "epoch": 7.0,
      "grad_norm": 0.2762734213295961,
      "learning_rate": 0.0,
      "loss": 0.3553,
      "step": 231
    },
    {
      "epoch": 7.0,
      "step": 231,
      "total_flos": 6.488329161885614e+17,
      "train_loss": 0.511818146808839,
      "train_runtime": 8828.5494,
      "train_samples_per_second": 2.506,
      "train_steps_per_second": 0.026
    }
  ],
  "logging_steps": 1,
  "max_steps": 231,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 7,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 6.488329161885614e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}