{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9904153354632586,
  "eval_steps": 500,
  "global_step": 312,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.009584664536741214,
      "grad_norm": 6.805100440979004,
      "learning_rate": 3.125e-07,
      "loss": 0.8451,
      "step": 1
    },
    {
      "epoch": 0.019169329073482427,
      "grad_norm": 6.944859504699707,
      "learning_rate": 6.25e-07,
      "loss": 0.8823,
      "step": 2
    },
    {
      "epoch": 0.02875399361022364,
      "grad_norm": 6.678200721740723,
      "learning_rate": 9.375000000000001e-07,
      "loss": 0.8476,
      "step": 3
    },
    {
      "epoch": 0.038338658146964855,
      "grad_norm": 6.952979564666748,
      "learning_rate": 1.25e-06,
      "loss": 0.912,
      "step": 4
    },
    {
      "epoch": 0.04792332268370607,
      "grad_norm": 6.973270416259766,
      "learning_rate": 1.5625e-06,
      "loss": 0.8781,
      "step": 5
    },
    {
      "epoch": 0.05750798722044728,
      "grad_norm": 6.345473289489746,
      "learning_rate": 1.8750000000000003e-06,
      "loss": 0.8242,
      "step": 6
    },
    {
      "epoch": 0.0670926517571885,
      "grad_norm": 5.5535969734191895,
      "learning_rate": 2.1875000000000002e-06,
      "loss": 0.8251,
      "step": 7
    },
    {
      "epoch": 0.07667731629392971,
      "grad_norm": 5.133858680725098,
      "learning_rate": 2.5e-06,
      "loss": 0.8043,
      "step": 8
    },
    {
      "epoch": 0.08626198083067092,
      "grad_norm": 3.2064390182495117,
      "learning_rate": 2.8125e-06,
      "loss": 0.7508,
      "step": 9
    },
    {
      "epoch": 0.09584664536741214,
      "grad_norm": 3.2898361682891846,
      "learning_rate": 3.125e-06,
      "loss": 0.7751,
      "step": 10
    },
    {
      "epoch": 0.10543130990415335,
      "grad_norm": 2.8389291763305664,
      "learning_rate": 3.4375e-06,
      "loss": 0.7334,
      "step": 11
    },
    {
      "epoch": 0.11501597444089456,
      "grad_norm": 2.9657039642333984,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.6988,
      "step": 12
    },
    {
      "epoch": 0.12460063897763578,
      "grad_norm": 3.3944308757781982,
      "learning_rate": 4.0625000000000005e-06,
      "loss": 0.6876,
      "step": 13
    },
    {
      "epoch": 0.134185303514377,
      "grad_norm": 3.525303840637207,
      "learning_rate": 4.3750000000000005e-06,
      "loss": 0.6934,
      "step": 14
    },
    {
      "epoch": 0.14376996805111822,
      "grad_norm": 3.2581071853637695,
      "learning_rate": 4.6875000000000004e-06,
      "loss": 0.6867,
      "step": 15
    },
    {
      "epoch": 0.15335463258785942,
      "grad_norm": 2.7474966049194336,
      "learning_rate": 5e-06,
      "loss": 0.617,
      "step": 16
    },
    {
      "epoch": 0.16293929712460065,
      "grad_norm": 2.480701208114624,
      "learning_rate": 5.3125e-06,
      "loss": 0.6209,
      "step": 17
    },
    {
      "epoch": 0.17252396166134185,
      "grad_norm": 2.3733997344970703,
      "learning_rate": 5.625e-06,
      "loss": 0.6337,
      "step": 18
    },
    {
      "epoch": 0.18210862619808307,
      "grad_norm": 2.1485886573791504,
      "learning_rate": 5.9375e-06,
      "loss": 0.6037,
      "step": 19
    },
    {
      "epoch": 0.19169329073482427,
      "grad_norm": 1.8110982179641724,
      "learning_rate": 6.25e-06,
      "loss": 0.6138,
      "step": 20
    },
    {
      "epoch": 0.2012779552715655,
      "grad_norm": 1.5806992053985596,
      "learning_rate": 6.5625e-06,
      "loss": 0.5795,
      "step": 21
    },
    {
      "epoch": 0.2108626198083067,
      "grad_norm": 1.5841341018676758,
      "learning_rate": 6.875e-06,
      "loss": 0.6092,
      "step": 22
    },
    {
      "epoch": 0.22044728434504793,
      "grad_norm": 1.566663146018982,
      "learning_rate": 7.1875e-06,
      "loss": 0.572,
      "step": 23
    },
    {
      "epoch": 0.23003194888178913,
      "grad_norm": 1.5222035646438599,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.5908,
      "step": 24
    },
    {
      "epoch": 0.23961661341853036,
      "grad_norm": 1.2543559074401855,
      "learning_rate": 7.8125e-06,
      "loss": 0.5891,
      "step": 25
    },
    {
      "epoch": 0.24920127795527156,
      "grad_norm": 1.1692081689834595,
      "learning_rate": 8.125000000000001e-06,
      "loss": 0.5471,
      "step": 26
    },
    {
      "epoch": 0.25878594249201275,
      "grad_norm": 1.3132680654525757,
      "learning_rate": 8.4375e-06,
      "loss": 0.5494,
      "step": 27
    },
    {
      "epoch": 0.268370607028754,
      "grad_norm": 1.0260133743286133,
      "learning_rate": 8.750000000000001e-06,
      "loss": 0.5169,
      "step": 28
    },
    {
      "epoch": 0.2779552715654952,
      "grad_norm": 0.9326399564743042,
      "learning_rate": 9.0625e-06,
      "loss": 0.5331,
      "step": 29
    },
    {
      "epoch": 0.28753993610223644,
      "grad_norm": 1.008641242980957,
      "learning_rate": 9.375000000000001e-06,
      "loss": 0.5607,
      "step": 30
    },
    {
      "epoch": 0.2971246006389776,
      "grad_norm": 1.1734907627105713,
      "learning_rate": 9.6875e-06,
      "loss": 0.5451,
      "step": 31
    },
    {
      "epoch": 0.30670926517571884,
      "grad_norm": 0.8239313960075378,
      "learning_rate": 1e-05,
      "loss": 0.5468,
      "step": 32
    },
    {
      "epoch": 0.31629392971246006,
      "grad_norm": 0.8218770623207092,
      "learning_rate": 9.999685283773504e-06,
      "loss": 0.5069,
      "step": 33
    },
    {
      "epoch": 0.3258785942492013,
      "grad_norm": 0.7858874797821045,
      "learning_rate": 9.998741174712534e-06,
      "loss": 0.518,
      "step": 34
    },
    {
      "epoch": 0.3354632587859425,
      "grad_norm": 0.8673418760299683,
      "learning_rate": 9.997167791667668e-06,
      "loss": 0.539,
      "step": 35
    },
    {
      "epoch": 0.3450479233226837,
      "grad_norm": 0.8987308740615845,
      "learning_rate": 9.994965332706574e-06,
      "loss": 0.5339,
      "step": 36
    },
    {
      "epoch": 0.3546325878594249,
      "grad_norm": 0.6194221377372742,
      "learning_rate": 9.992134075089085e-06,
      "loss": 0.4994,
      "step": 37
    },
    {
      "epoch": 0.36421725239616615,
      "grad_norm": 0.7056483030319214,
      "learning_rate": 9.98867437523228e-06,
      "loss": 0.5016,
      "step": 38
    },
    {
      "epoch": 0.3738019169329074,
      "grad_norm": 0.7977483868598938,
      "learning_rate": 9.984586668665641e-06,
      "loss": 0.5228,
      "step": 39
    },
    {
      "epoch": 0.38338658146964855,
      "grad_norm": 0.8258603811264038,
      "learning_rate": 9.979871469976197e-06,
      "loss": 0.5016,
      "step": 40
    },
    {
      "epoch": 0.3929712460063898,
      "grad_norm": 0.7627078294754028,
      "learning_rate": 9.974529372743762e-06,
      "loss": 0.5001,
      "step": 41
    },
    {
      "epoch": 0.402555910543131,
      "grad_norm": 0.7237797379493713,
      "learning_rate": 9.968561049466214e-06,
      "loss": 0.5044,
      "step": 42
    },
    {
      "epoch": 0.41214057507987223,
      "grad_norm": 0.6851755380630493,
      "learning_rate": 9.961967251474823e-06,
      "loss": 0.522,
      "step": 43
    },
    {
      "epoch": 0.4217252396166134,
      "grad_norm": 0.7777822613716125,
      "learning_rate": 9.954748808839675e-06,
      "loss": 0.4991,
      "step": 44
    },
    {
      "epoch": 0.43130990415335463,
      "grad_norm": 0.7029432654380798,
      "learning_rate": 9.946906630265184e-06,
      "loss": 0.5415,
      "step": 45
    },
    {
      "epoch": 0.44089456869009586,
      "grad_norm": 0.6500408053398132,
      "learning_rate": 9.938441702975689e-06,
      "loss": 0.4806,
      "step": 46
    },
    {
      "epoch": 0.4504792332268371,
      "grad_norm": 0.7370374202728271,
      "learning_rate": 9.92935509259118e-06,
      "loss": 0.5094,
      "step": 47
    },
    {
      "epoch": 0.46006389776357826,
      "grad_norm": 0.6582643389701843,
      "learning_rate": 9.91964794299315e-06,
      "loss": 0.5158,
      "step": 48
    },
    {
      "epoch": 0.4696485623003195,
      "grad_norm": 0.6737906336784363,
      "learning_rate": 9.909321476180594e-06,
      "loss": 0.4944,
      "step": 49
    },
    {
      "epoch": 0.4792332268370607,
      "grad_norm": 0.6686286926269531,
      "learning_rate": 9.898376992116179e-06,
      "loss": 0.4889,
      "step": 50
    },
    {
      "epoch": 0.48881789137380194,
      "grad_norm": 0.689642071723938,
      "learning_rate": 9.886815868562596e-06,
      "loss": 0.518,
      "step": 51
    },
    {
      "epoch": 0.4984025559105431,
      "grad_norm": 0.6608878970146179,
      "learning_rate": 9.874639560909118e-06,
      "loss": 0.499,
      "step": 52
    },
    {
      "epoch": 0.5079872204472844,
      "grad_norm": 0.7523769736289978,
      "learning_rate": 9.861849601988384e-06,
      "loss": 0.5226,
      "step": 53
    },
    {
      "epoch": 0.5175718849840255,
      "grad_norm": 0.6801315546035767,
      "learning_rate": 9.848447601883436e-06,
      "loss": 0.4806,
      "step": 54
    },
    {
      "epoch": 0.5271565495207667,
      "grad_norm": 0.6715684533119202,
      "learning_rate": 9.834435247725032e-06,
      "loss": 0.4882,
      "step": 55
    },
    {
      "epoch": 0.536741214057508,
      "grad_norm": 0.6812077760696411,
      "learning_rate": 9.819814303479268e-06,
      "loss": 0.4752,
      "step": 56
    },
    {
      "epoch": 0.5463258785942492,
      "grad_norm": 0.7278083562850952,
      "learning_rate": 9.804586609725499e-06,
      "loss": 0.4914,
      "step": 57
    },
    {
      "epoch": 0.5559105431309904,
      "grad_norm": 0.7285177707672119,
      "learning_rate": 9.788754083424654e-06,
      "loss": 0.4659,
      "step": 58
    },
    {
      "epoch": 0.5654952076677316,
      "grad_norm": 0.6427510976791382,
      "learning_rate": 9.772318717677905e-06,
      "loss": 0.4961,
      "step": 59
    },
    {
      "epoch": 0.5750798722044729,
      "grad_norm": 0.7198605537414551,
      "learning_rate": 9.755282581475769e-06,
      "loss": 0.4894,
      "step": 60
    },
    {
      "epoch": 0.5846645367412141,
      "grad_norm": 0.7601820230484009,
      "learning_rate": 9.737647819437645e-06,
      "loss": 0.4906,
      "step": 61
    },
    {
      "epoch": 0.5942492012779552,
      "grad_norm": 0.6605767607688904,
      "learning_rate": 9.719416651541839e-06,
      "loss": 0.4854,
      "step": 62
    },
    {
      "epoch": 0.6038338658146964,
      "grad_norm": 0.7106564044952393,
      "learning_rate": 9.700591372846096e-06,
      "loss": 0.4793,
      "step": 63
    },
    {
      "epoch": 0.6134185303514377,
      "grad_norm": 0.684380829334259,
      "learning_rate": 9.681174353198687e-06,
      "loss": 0.4841,
      "step": 64
    },
    {
      "epoch": 0.6230031948881789,
      "grad_norm": 0.6601747274398804,
      "learning_rate": 9.661168036940071e-06,
      "loss": 0.4647,
      "step": 65
    },
    {
      "epoch": 0.6325878594249201,
      "grad_norm": 0.6643561124801636,
      "learning_rate": 9.640574942595195e-06,
      "loss": 0.5038,
      "step": 66
    },
    {
      "epoch": 0.6421725239616614,
      "grad_norm": 0.665458619594574,
      "learning_rate": 9.619397662556434e-06,
      "loss": 0.4716,
      "step": 67
    },
    {
      "epoch": 0.6517571884984026,
      "grad_norm": 0.6601731181144714,
      "learning_rate": 9.597638862757255e-06,
      "loss": 0.4892,
      "step": 68
    },
    {
      "epoch": 0.6613418530351438,
      "grad_norm": 0.7274516224861145,
      "learning_rate": 9.5753012823366e-06,
      "loss": 0.4876,
      "step": 69
    },
    {
      "epoch": 0.670926517571885,
      "grad_norm": 0.6386899948120117,
      "learning_rate": 9.552387733294081e-06,
      "loss": 0.4596,
      "step": 70
    },
    {
      "epoch": 0.6805111821086262,
      "grad_norm": 0.6390331983566284,
      "learning_rate": 9.528901100135971e-06,
      "loss": 0.4667,
      "step": 71
    },
    {
      "epoch": 0.6900958466453674,
      "grad_norm": 0.6199338436126709,
      "learning_rate": 9.504844339512096e-06,
      "loss": 0.4902,
      "step": 72
    },
    {
      "epoch": 0.6996805111821086,
      "grad_norm": 0.6297160387039185,
      "learning_rate": 9.480220479843627e-06,
      "loss": 0.4674,
      "step": 73
    },
    {
      "epoch": 0.7092651757188498,
      "grad_norm": 0.5854744911193848,
      "learning_rate": 9.45503262094184e-06,
      "loss": 0.4738,
      "step": 74
    },
    {
      "epoch": 0.7188498402555911,
      "grad_norm": 0.6352077722549438,
      "learning_rate": 9.4292839336179e-06,
      "loss": 0.4696,
      "step": 75
    },
    {
      "epoch": 0.7284345047923323,
      "grad_norm": 0.6318387389183044,
      "learning_rate": 9.40297765928369e-06,
      "loss": 0.4745,
      "step": 76
    },
    {
      "epoch": 0.7380191693290735,
      "grad_norm": 0.7550281286239624,
      "learning_rate": 9.376117109543769e-06,
      "loss": 0.4694,
      "step": 77
    },
    {
      "epoch": 0.7476038338658147,
      "grad_norm": 0.7698407173156738,
      "learning_rate": 9.348705665778479e-06,
      "loss": 0.4698,
      "step": 78
    },
    {
      "epoch": 0.7571884984025559,
      "grad_norm": 0.6444739103317261,
      "learning_rate": 9.320746778718274e-06,
      "loss": 0.4352,
      "step": 79
    },
    {
      "epoch": 0.7667731629392971,
      "grad_norm": 0.6695520281791687,
      "learning_rate": 9.292243968009332e-06,
      "loss": 0.4778,
      "step": 80
    },
    {
      "epoch": 0.7763578274760383,
      "grad_norm": 0.6107879877090454,
      "learning_rate": 9.263200821770462e-06,
      "loss": 0.4693,
      "step": 81
    },
    {
      "epoch": 0.7859424920127795,
      "grad_norm": 0.583711564540863,
      "learning_rate": 9.233620996141421e-06,
      "loss": 0.467,
      "step": 82
    },
    {
      "epoch": 0.7955271565495208,
      "grad_norm": 0.6243283152580261,
      "learning_rate": 9.203508214822652e-06,
      "loss": 0.4936,
      "step": 83
    },
    {
      "epoch": 0.805111821086262,
      "grad_norm": 0.6546980738639832,
      "learning_rate": 9.172866268606514e-06,
      "loss": 0.4662,
      "step": 84
    },
    {
      "epoch": 0.8146964856230032,
      "grad_norm": 0.6571932435035706,
      "learning_rate": 9.141699014900084e-06,
      "loss": 0.5032,
      "step": 85
    },
    {
      "epoch": 0.8242811501597445,
      "grad_norm": 0.6166942119598389,
      "learning_rate": 9.110010377239552e-06,
      "loss": 0.481,
      "step": 86
    },
    {
      "epoch": 0.8338658146964856,
      "grad_norm": 0.6507984399795532,
      "learning_rate": 9.077804344796302e-06,
      "loss": 0.4515,
      "step": 87
    },
    {
      "epoch": 0.8434504792332268,
      "grad_norm": 0.6561005711555481,
      "learning_rate": 9.045084971874738e-06,
      "loss": 0.4935,
      "step": 88
    },
    {
      "epoch": 0.853035143769968,
      "grad_norm": 0.6434285044670105,
      "learning_rate": 9.011856377401891e-06,
      "loss": 0.4832,
      "step": 89
    },
    {
      "epoch": 0.8626198083067093,
      "grad_norm": 0.6693336367607117,
      "learning_rate": 8.978122744408905e-06,
      "loss": 0.4561,
      "step": 90
    },
    {
      "epoch": 0.8722044728434505,
      "grad_norm": 0.6963056325912476,
      "learning_rate": 8.943888319504456e-06,
      "loss": 0.4893,
      "step": 91
    },
    {
      "epoch": 0.8817891373801917,
      "grad_norm": 0.6495336890220642,
      "learning_rate": 8.90915741234015e-06,
      "loss": 0.4851,
      "step": 92
    },
    {
      "epoch": 0.8913738019169329,
      "grad_norm": 0.6429077386856079,
      "learning_rate": 8.873934395068006e-06,
      "loss": 0.4797,
      "step": 93
    },
    {
      "epoch": 0.9009584664536742,
      "grad_norm": 0.7217315435409546,
      "learning_rate": 8.838223701790057e-06,
      "loss": 0.4641,
      "step": 94
    },
    {
      "epoch": 0.9105431309904153,
      "grad_norm": 0.6442226767539978,
      "learning_rate": 8.802029828000157e-06,
      "loss": 0.4663,
      "step": 95
    },
    {
      "epoch": 0.9201277955271565,
      "grad_norm": 0.6835136413574219,
      "learning_rate": 8.765357330018056e-06,
      "loss": 0.5088,
      "step": 96
    },
    {
      "epoch": 0.9297124600638977,
      "grad_norm": 0.672391951084137,
      "learning_rate": 8.728210824415829e-06,
      "loss": 0.4482,
      "step": 97
    },
    {
      "epoch": 0.939297124600639,
      "grad_norm": 0.7429690361022949,
      "learning_rate": 8.690594987436705e-06,
      "loss": 0.4659,
      "step": 98
    },
    {
      "epoch": 0.9488817891373802,
      "grad_norm": 0.6244953274726868,
      "learning_rate": 8.652514554406388e-06,
      "loss": 0.4798,
      "step": 99
    },
    {
      "epoch": 0.9584664536741214,
      "grad_norm": 0.606186032295227,
      "learning_rate": 8.613974319136959e-06,
      "loss": 0.4733,
      "step": 100
    },
    {
      "epoch": 0.9680511182108626,
      "grad_norm": 0.6192579865455627,
      "learning_rate": 8.574979133323378e-06,
      "loss": 0.4735,
      "step": 101
    },
    {
      "epoch": 0.9776357827476039,
      "grad_norm": 0.6590493321418762,
      "learning_rate": 8.535533905932739e-06,
      "loss": 0.4616,
      "step": 102
    },
    {
      "epoch": 0.987220447284345,
      "grad_norm": 0.7287415862083435,
      "learning_rate": 8.495643602586287e-06,
      "loss": 0.4889,
      "step": 103
    },
    {
      "epoch": 0.9968051118210862,
      "grad_norm": 0.5435300469398499,
      "learning_rate": 8.455313244934324e-06,
      "loss": 0.4721,
      "step": 104
    },
    {
      "epoch": 1.0063897763578276,
      "grad_norm": 0.6497277021408081,
      "learning_rate": 8.414547910024035e-06,
      "loss": 0.3994,
      "step": 105
    },
    {
      "epoch": 1.0159744408945688,
      "grad_norm": 0.6478316783905029,
      "learning_rate": 8.373352729660373e-06,
      "loss": 0.404,
      "step": 106
    },
    {
      "epoch": 1.0255591054313098,
      "grad_norm": 0.5634056925773621,
      "learning_rate": 8.331732889760021e-06,
      "loss": 0.4008,
      "step": 107
    },
    {
      "epoch": 1.035143769968051,
      "grad_norm": 0.6008002161979675,
      "learning_rate": 8.289693629698564e-06,
      "loss": 0.4069,
      "step": 108
    },
    {
      "epoch": 1.0447284345047922,
      "grad_norm": 0.5883151292800903,
      "learning_rate": 8.247240241650918e-06,
      "loss": 0.3912,
      "step": 109
    },
    {
      "epoch": 1.0543130990415335,
      "grad_norm": 0.6554713249206543,
      "learning_rate": 8.204378069925121e-06,
      "loss": 0.4295,
      "step": 110
    },
    {
      "epoch": 1.0638977635782747,
      "grad_norm": 0.5875764489173889,
      "learning_rate": 8.16111251028955e-06,
      "loss": 0.4055,
      "step": 111
    },
    {
      "epoch": 1.073482428115016,
      "grad_norm": 0.5865110754966736,
      "learning_rate": 8.117449009293668e-06,
      "loss": 0.3787,
      "step": 112
    },
    {
      "epoch": 1.0830670926517572,
      "grad_norm": 0.7027278542518616,
      "learning_rate": 8.073393063582386e-06,
      "loss": 0.4001,
      "step": 113
    },
    {
      "epoch": 1.0926517571884984,
      "grad_norm": 0.5848774313926697,
      "learning_rate": 8.0289502192041e-06,
      "loss": 0.4105,
      "step": 114
    },
    {
      "epoch": 1.1022364217252396,
      "grad_norm": 0.6438045501708984,
      "learning_rate": 7.984126070912519e-06,
      "loss": 0.3861,
      "step": 115
    },
    {
      "epoch": 1.1118210862619808,
      "grad_norm": 0.8243338465690613,
      "learning_rate": 7.938926261462366e-06,
      "loss": 0.4029,
      "step": 116
    },
    {
      "epoch": 1.121405750798722,
      "grad_norm": 0.6301456093788147,
      "learning_rate": 7.89335648089903e-06,
      "loss": 0.422,
      "step": 117
    },
    {
      "epoch": 1.1309904153354633,
      "grad_norm": 0.575528621673584,
      "learning_rate": 7.84742246584226e-06,
      "loss": 0.4258,
      "step": 118
    },
    {
      "epoch": 1.1405750798722045,
      "grad_norm": 0.7289822101593018,
      "learning_rate": 7.801129998764014e-06,
      "loss": 0.3868,
      "step": 119
    },
    {
      "epoch": 1.1501597444089458,
      "grad_norm": 0.6722568273544312,
      "learning_rate": 7.754484907260513e-06,
      "loss": 0.4185,
      "step": 120
    },
    {
      "epoch": 1.159744408945687,
      "grad_norm": 0.6317142844200134,
      "learning_rate": 7.70749306331863e-06,
      "loss": 0.4026,
      "step": 121
    },
    {
      "epoch": 1.1693290734824282,
      "grad_norm": 0.6734579205513,
      "learning_rate": 7.660160382576683e-06,
      "loss": 0.413,
      "step": 122
    },
    {
      "epoch": 1.1789137380191694,
      "grad_norm": 0.6377459764480591,
      "learning_rate": 7.612492823579744e-06,
      "loss": 0.4055,
      "step": 123
    },
    {
      "epoch": 1.1884984025559104,
      "grad_norm": 0.6760308146476746,
      "learning_rate": 7.564496387029532e-06,
      "loss": 0.3767,
      "step": 124
    },
    {
      "epoch": 1.1980830670926517,
      "grad_norm": 0.598756730556488,
      "learning_rate": 7.516177115029002e-06,
      "loss": 0.4244,
      "step": 125
    },
    {
      "epoch": 1.207667731629393,
      "grad_norm": 0.5769426226615906,
      "learning_rate": 7.467541090321735e-06,
      "loss": 0.4159,
      "step": 126
    },
    {
      "epoch": 1.2172523961661341,
      "grad_norm": 0.603039026260376,
      "learning_rate": 7.4185944355261996e-06,
      "loss": 0.4345,
      "step": 127
    },
    {
      "epoch": 1.2268370607028753,
      "grad_norm": 0.6537725925445557,
      "learning_rate": 7.369343312364994e-06,
      "loss": 0.397,
      "step": 128
    },
    {
      "epoch": 1.2364217252396166,
      "grad_norm": 0.593711793422699,
      "learning_rate": 7.319793920889171e-06,
      "loss": 0.4171,
      "step": 129
    },
    {
      "epoch": 1.2460063897763578,
      "grad_norm": 0.5596345663070679,
      "learning_rate": 7.269952498697734e-06,
      "loss": 0.4066,
      "step": 130
    },
    {
      "epoch": 1.255591054313099,
      "grad_norm": 0.56202232837677,
      "learning_rate": 7.219825320152411e-06,
      "loss": 0.378,
      "step": 131
    },
    {
      "epoch": 1.2651757188498403,
      "grad_norm": 0.5652729272842407,
      "learning_rate": 7.169418695587791e-06,
      "loss": 0.4094,
      "step": 132
    },
    {
      "epoch": 1.2747603833865815,
      "grad_norm": 0.665550172328949,
      "learning_rate": 7.118738970516944e-06,
      "loss": 0.4206,
      "step": 133
    },
    {
      "epoch": 1.2843450479233227,
      "grad_norm": 0.5853424668312073,
      "learning_rate": 7.067792524832604e-06,
      "loss": 0.3874,
      "step": 134
    },
    {
      "epoch": 1.293929712460064,
      "grad_norm": 0.6019495725631714,
      "learning_rate": 7.016585772004026e-06,
      "loss": 0.4303,
      "step": 135
    },
    {
      "epoch": 1.3035143769968052,
      "grad_norm": 0.6686872243881226,
      "learning_rate": 6.965125158269619e-06,
      "loss": 0.3832,
      "step": 136
    },
    {
      "epoch": 1.3130990415335464,
      "grad_norm": 0.6023411750793457,
      "learning_rate": 6.913417161825449e-06,
      "loss": 0.3898,
      "step": 137
    },
    {
      "epoch": 1.3226837060702876,
      "grad_norm": 0.5375120639801025,
      "learning_rate": 6.8614682920097265e-06,
      "loss": 0.4135,
      "step": 138
    },
    {
      "epoch": 1.3322683706070286,
      "grad_norm": 0.5534108877182007,
      "learning_rate": 6.809285088483361e-06,
      "loss": 0.3891,
      "step": 139
    },
    {
      "epoch": 1.34185303514377,
      "grad_norm": 0.5958161950111389,
      "learning_rate": 6.7568741204067145e-06,
      "loss": 0.425,
      "step": 140
    },
    {
      "epoch": 1.351437699680511,
      "grad_norm": 0.5792331099510193,
      "learning_rate": 6.704241985612625e-06,
      "loss": 0.3977,
      "step": 141
    },
    {
      "epoch": 1.3610223642172525,
      "grad_norm": 0.5817360877990723,
      "learning_rate": 6.651395309775837e-06,
      "loss": 0.3949,
      "step": 142
    },
    {
      "epoch": 1.3706070287539935,
      "grad_norm": 0.6270766854286194,
      "learning_rate": 6.598340745578908e-06,
      "loss": 0.3939,
      "step": 143
    },
    {
      "epoch": 1.3801916932907348,
      "grad_norm": 0.6821984648704529,
      "learning_rate": 6.545084971874738e-06,
      "loss": 0.3845,
      "step": 144
    },
    {
      "epoch": 1.389776357827476,
      "grad_norm": 0.6132534146308899,
      "learning_rate": 6.491634692845781e-06,
      "loss": 0.399,
      "step": 145
    },
    {
      "epoch": 1.3993610223642172,
      "grad_norm": 0.5472438931465149,
      "learning_rate": 6.437996637160086e-06,
      "loss": 0.3786,
      "step": 146
    },
    {
      "epoch": 1.4089456869009584,
      "grad_norm": 0.5496889352798462,
      "learning_rate": 6.384177557124247e-06,
      "loss": 0.3863,
      "step": 147
    },
    {
      "epoch": 1.4185303514376997,
      "grad_norm": 0.6177486777305603,
      "learning_rate": 6.330184227833376e-06,
      "loss": 0.3792,
      "step": 148
    },
    {
      "epoch": 1.428115015974441,
      "grad_norm": 0.5737434029579163,
      "learning_rate": 6.276023446318214e-06,
      "loss": 0.3948,
      "step": 149
    },
    {
      "epoch": 1.4376996805111821,
      "grad_norm": 0.5723017454147339,
      "learning_rate": 6.2217020306894705e-06,
      "loss": 0.4137,
      "step": 150
    },
    {
      "epoch": 1.4472843450479234,
      "grad_norm": 0.6154125332832336,
      "learning_rate": 6.1672268192795285e-06,
      "loss": 0.4154,
      "step": 151
    },
    {
      "epoch": 1.4568690095846646,
      "grad_norm": 0.56871497631073,
      "learning_rate": 6.112604669781572e-06,
      "loss": 0.4095,
      "step": 152
    },
    {
      "epoch": 1.4664536741214058,
      "grad_norm": 0.5961901545524597,
      "learning_rate": 6.057842458386315e-06,
      "loss": 0.402,
      "step": 153
    },
    {
      "epoch": 1.476038338658147,
      "grad_norm": 0.601497232913971,
      "learning_rate": 6.002947078916365e-06,
      "loss": 0.4026,
      "step": 154
    },
    {
      "epoch": 1.4856230031948883,
      "grad_norm": 0.6027733683586121,
      "learning_rate": 5.947925441958393e-06,
      "loss": 0.4125,
      "step": 155
    },
    {
      "epoch": 1.4952076677316293,
      "grad_norm": 0.5610053539276123,
      "learning_rate": 5.892784473993184e-06,
      "loss": 0.3937,
      "step": 156
    },
    {
      "epoch": 1.5047923322683707,
      "grad_norm": 0.5642369985580444,
      "learning_rate": 5.837531116523683e-06,
      "loss": 0.3939,
      "step": 157
    },
    {
      "epoch": 1.5143769968051117,
      "grad_norm": 0.6150848269462585,
      "learning_rate": 5.782172325201155e-06,
      "loss": 0.4254,
      "step": 158
    },
    {
      "epoch": 1.5239616613418532,
      "grad_norm": 0.5497562885284424,
      "learning_rate": 5.726715068949564e-06,
      "loss": 0.4142,
      "step": 159
    },
    {
      "epoch": 1.5335463258785942,
      "grad_norm": 0.5342154502868652,
      "learning_rate": 5.671166329088278e-06,
      "loss": 0.387,
      "step": 160
    },
    {
      "epoch": 1.5431309904153354,
      "grad_norm": 0.551790714263916,
      "learning_rate": 5.615533098453215e-06,
      "loss": 0.4116,
      "step": 161
    },
    {
      "epoch": 1.5527156549520766,
      "grad_norm": 0.5560970306396484,
      "learning_rate": 5.559822380516539e-06,
      "loss": 0.4043,
      "step": 162
    },
    {
      "epoch": 1.5623003194888179,
      "grad_norm": 0.7323923110961914,
      "learning_rate": 5.504041188505022e-06,
      "loss": 0.3892,
      "step": 163
    },
    {
      "epoch": 1.571884984025559,
      "grad_norm": 0.5998652577400208,
      "learning_rate": 5.448196544517168e-06,
      "loss": 0.4068,
      "step": 164
    },
    {
      "epoch": 1.5814696485623003,
      "grad_norm": 0.586337685585022,
      "learning_rate": 5.392295478639226e-06,
      "loss": 0.4059,
      "step": 165
    },
    {
      "epoch": 1.5910543130990416,
      "grad_norm": 0.5749455094337463,
      "learning_rate": 5.336345028060199e-06,
      "loss": 0.3849,
      "step": 166
    },
    {
      "epoch": 1.6006389776357828,
      "grad_norm": 0.5518559217453003,
      "learning_rate": 5.2803522361859596e-06,
      "loss": 0.3861,
      "step": 167
    },
    {
      "epoch": 1.610223642172524,
      "grad_norm": 0.6018110513687134,
      "learning_rate": 5.224324151752575e-06,
      "loss": 0.3985,
      "step": 168
    },
    {
      "epoch": 1.619808306709265,
      "grad_norm": 0.5411421656608582,
      "learning_rate": 5.168267827938971e-06,
      "loss": 0.4005,
      "step": 169
    },
    {
      "epoch": 1.6293929712460065,
      "grad_norm": 0.5747963190078735,
      "learning_rate": 5.112190321479026e-06,
      "loss": 0.4332,
      "step": 170
    },
    {
      "epoch": 1.6389776357827475,
      "grad_norm": 0.6073194146156311,
      "learning_rate": 5.05609869177323e-06,
      "loss": 0.4096,
      "step": 171
    },
    {
      "epoch": 1.648562300319489,
      "grad_norm": 0.5901669263839722,
      "learning_rate": 5e-06,
      "loss": 0.4068,
      "step": 172
    },
    {
      "epoch": 1.65814696485623,
      "grad_norm": 0.6021949648857117,
      "learning_rate": 4.943901308226771e-06,
      "loss": 0.394,
      "step": 173
    },
    {
      "epoch": 1.6677316293929714,
      "grad_norm": 0.5671015977859497,
      "learning_rate": 4.887809678520976e-06,
      "loss": 0.4074,
      "step": 174
    },
    {
      "epoch": 1.6773162939297124,
      "grad_norm": 0.5579338669776917,
      "learning_rate": 4.831732172061032e-06,
      "loss": 0.394,
      "step": 175
    },
    {
      "epoch": 1.6869009584664538,
      "grad_norm": 0.6558188796043396,
      "learning_rate": 4.775675848247427e-06,
      "loss": 0.4043,
      "step": 176
    },
    {
      "epoch": 1.6964856230031948,
      "grad_norm": 0.5909088850021362,
      "learning_rate": 4.719647763814041e-06,
      "loss": 0.3948,
      "step": 177
    },
    {
      "epoch": 1.706070287539936,
      "grad_norm": 0.5313936471939087,
      "learning_rate": 4.663654971939802e-06,
      "loss": 0.4152,
      "step": 178
    },
    {
      "epoch": 1.7156549520766773,
      "grad_norm": 0.5276142358779907,
      "learning_rate": 4.6077045213607765e-06,
      "loss": 0.4234,
      "step": 179
    },
    {
      "epoch": 1.7252396166134185,
      "grad_norm": 0.5799081921577454,
      "learning_rate": 4.551803455482833e-06,
      "loss": 0.3971,
      "step": 180
    },
    {
      "epoch": 1.7348242811501597,
      "grad_norm": 0.5779193043708801,
      "learning_rate": 4.4959588114949785e-06,
      "loss": 0.397,
      "step": 181
    },
    {
      "epoch": 1.744408945686901,
      "grad_norm": 0.5751073360443115,
      "learning_rate": 4.4401776194834615e-06,
      "loss": 0.3825,
      "step": 182
    },
    {
      "epoch": 1.7539936102236422,
      "grad_norm": 0.5396866798400879,
      "learning_rate": 4.384466901546786e-06,
      "loss": 0.413,
      "step": 183
    },
    {
      "epoch": 1.7635782747603834,
      "grad_norm": 0.550475001335144,
      "learning_rate": 4.3288336709117246e-06,
      "loss": 0.3708,
      "step": 184
    },
    {
      "epoch": 1.7731629392971247,
      "grad_norm": 0.5338464975357056,
      "learning_rate": 4.273284931050438e-06,
      "loss": 0.4228,
      "step": 185
    },
    {
      "epoch": 1.7827476038338657,
      "grad_norm": 0.5433242321014404,
      "learning_rate": 4.217827674798845e-06,
      "loss": 0.3989,
      "step": 186
    },
    {
      "epoch": 1.792332268370607,
      "grad_norm": 0.5268736481666565,
      "learning_rate": 4.162468883476319e-06,
      "loss": 0.4248,
      "step": 187
    },
    {
      "epoch": 1.8019169329073481,
      "grad_norm": 0.5313707590103149,
      "learning_rate": 4.107215526006818e-06,
      "loss": 0.409,
      "step": 188
    },
    {
      "epoch": 1.8115015974440896,
      "grad_norm": 0.560043215751648,
      "learning_rate": 4.052074558041608e-06,
      "loss": 0.3908,
      "step": 189
    },
    {
      "epoch": 1.8210862619808306,
      "grad_norm": 0.5267096757888794,
      "learning_rate": 3.997052921083637e-06,
      "loss": 0.3714,
      "step": 190
    },
    {
      "epoch": 1.830670926517572,
      "grad_norm": 0.5708348155021667,
      "learning_rate": 3.9421575416136866e-06,
      "loss": 0.3997,
      "step": 191
    },
    {
      "epoch": 1.840255591054313,
      "grad_norm": 0.5195730328559875,
      "learning_rate": 3.887395330218429e-06,
      "loss": 0.3997,
      "step": 192
    },
    {
      "epoch": 1.8498402555910545,
      "grad_norm": 0.5325154662132263,
      "learning_rate": 3.832773180720475e-06,
      "loss": 0.4064,
      "step": 193
    },
    {
      "epoch": 1.8594249201277955,
      "grad_norm": 0.551060676574707,
      "learning_rate": 3.778297969310529e-06,
      "loss": 0.3995,
      "step": 194
    },
    {
      "epoch": 1.8690095846645367,
      "grad_norm": 0.547404408454895,
      "learning_rate": 3.723976553681787e-06,
      "loss": 0.4069,
      "step": 195
    },
    {
      "epoch": 1.878594249201278,
      "grad_norm": 0.4963686764240265,
      "learning_rate": 3.669815772166625e-06,
      "loss": 0.3995,
      "step": 196
    },
    {
      "epoch": 1.8881789137380192,
      "grad_norm": 0.5169672966003418,
      "learning_rate": 3.6158224428757538e-06,
      "loss": 0.3937,
      "step": 197
    },
    {
      "epoch": 1.8977635782747604,
      "grad_norm": 0.5645796656608582,
      "learning_rate": 3.562003362839914e-06,
      "loss": 0.4011,
      "step": 198
    },
    {
      "epoch": 1.9073482428115016,
      "grad_norm": 0.5136457681655884,
      "learning_rate": 3.50836530715422e-06,
      "loss": 0.3994,
      "step": 199
    },
    {
      "epoch": 1.9169329073482428,
      "grad_norm": 0.527316689491272,
      "learning_rate": 3.4549150281252635e-06,
      "loss": 0.3978,
      "step": 200
    },
    {
      "epoch": 1.926517571884984,
      "grad_norm": 0.5034081935882568,
      "learning_rate": 3.4016592544210937e-06,
      "loss": 0.4142,
      "step": 201
    },
    {
      "epoch": 1.9361022364217253,
      "grad_norm": 0.5851829051971436,
      "learning_rate": 3.3486046902241663e-06,
      "loss": 0.4212,
      "step": 202
    },
    {
      "epoch": 1.9456869009584663,
      "grad_norm": 0.5426456332206726,
      "learning_rate": 3.295758014387375e-06,
      "loss": 0.4047,
      "step": 203
    },
    {
      "epoch": 1.9552715654952078,
      "grad_norm": 0.49820733070373535,
      "learning_rate": 3.2431258795932863e-06,
      "loss": 0.4234,
      "step": 204
    },
    {
      "epoch": 1.9648562300319488,
      "grad_norm": 0.505520761013031,
      "learning_rate": 3.1907149115166403e-06,
      "loss": 0.4206,
      "step": 205
    },
    {
      "epoch": 1.9744408945686902,
      "grad_norm": 0.523756742477417,
      "learning_rate": 3.1385317079902743e-06,
      "loss": 0.368,
      "step": 206
    },
    {
      "epoch": 1.9840255591054312,
      "grad_norm": 0.49615344405174255,
      "learning_rate": 3.0865828381745515e-06,
      "loss": 0.3858,
      "step": 207
    },
    {
      "epoch": 1.9936102236421727,
      "grad_norm": 0.5383814573287964,
      "learning_rate": 3.0348748417303826e-06,
      "loss": 0.4036,
      "step": 208
    },
    {
      "epoch": 2.0031948881789137,
      "grad_norm": 0.5455489754676819,
      "learning_rate": 2.9834142279959754e-06,
      "loss": 0.3721,
      "step": 209
    },
    {
      "epoch": 2.012779552715655,
      "grad_norm": 0.5573099255561829,
      "learning_rate": 2.932207475167398e-06,
      "loss": 0.3451,
      "step": 210
    },
    {
      "epoch": 2.022364217252396,
      "grad_norm": 0.5531795620918274,
      "learning_rate": 2.8812610294830568e-06,
      "loss": 0.3294,
      "step": 211
    },
    {
      "epoch": 2.0319488817891376,
      "grad_norm": 0.49947431683540344,
      "learning_rate": 2.83058130441221e-06,
      "loss": 0.3616,
      "step": 212
    },
    {
      "epoch": 2.0415335463258786,
      "grad_norm": 0.4917317032814026,
      "learning_rate": 2.7801746798475905e-06,
      "loss": 0.3446,
      "step": 213
    },
    {
      "epoch": 2.0511182108626196,
      "grad_norm": 0.4955407381057739,
      "learning_rate": 2.7300475013022666e-06,
      "loss": 0.3423,
      "step": 214
    },
    {
      "epoch": 2.060702875399361,
      "grad_norm": 0.5965845584869385,
      "learning_rate": 2.6802060791108304e-06,
      "loss": 0.3489,
      "step": 215
    },
    {
      "epoch": 2.070287539936102,
      "grad_norm": 0.5219831466674805,
      "learning_rate": 2.6306566876350072e-06,
      "loss": 0.3175,
      "step": 216
    },
    {
      "epoch": 2.0798722044728435,
      "grad_norm": 0.510310709476471,
      "learning_rate": 2.5814055644738013e-06,
      "loss": 0.362,
      "step": 217
    },
    {
      "epoch": 2.0894568690095845,
      "grad_norm": 0.5322299599647522,
      "learning_rate": 2.532458909678266e-06,
      "loss": 0.3329,
      "step": 218
    },
    {
      "epoch": 2.099041533546326,
      "grad_norm": 0.49356314539909363,
      "learning_rate": 2.483822884971e-06,
      "loss": 0.3881,
      "step": 219
    },
    {
      "epoch": 2.108626198083067,
      "grad_norm": 0.49142566323280334,
      "learning_rate": 2.43550361297047e-06,
      "loss": 0.3194,
      "step": 220
    },
    {
      "epoch": 2.1182108626198084,
      "grad_norm": 0.5280053019523621,
      "learning_rate": 2.387507176420256e-06,
      "loss": 0.3397,
      "step": 221
    },
    {
      "epoch": 2.1277955271565494,
      "grad_norm": 0.5028843283653259,
      "learning_rate": 2.339839617423318e-06,
      "loss": 0.3779,
      "step": 222
    },
    {
      "epoch": 2.137380191693291,
      "grad_norm": 0.4654610753059387,
      "learning_rate": 2.2925069366813718e-06,
      "loss": 0.3173,
      "step": 223
    },
    {
      "epoch": 2.146964856230032,
      "grad_norm": 0.5069674253463745,
      "learning_rate": 2.245515092739488e-06,
      "loss": 0.3477,
      "step": 224
    },
    {
      "epoch": 2.1565495207667733,
      "grad_norm": 0.49440547823905945,
      "learning_rate": 2.1988700012359865e-06,
      "loss": 0.3351,
      "step": 225
    },
    {
      "epoch": 2.1661341853035143,
      "grad_norm": 0.48839282989501953,
      "learning_rate": 2.1525775341577404e-06,
      "loss": 0.3481,
      "step": 226
    },
    {
      "epoch": 2.1757188498402558,
      "grad_norm": 0.5006738305091858,
      "learning_rate": 2.1066435191009717e-06,
      "loss": 0.3302,
      "step": 227
    },
    {
      "epoch": 2.1853035143769968,
      "grad_norm": 0.5038663744926453,
      "learning_rate": 2.061073738537635e-06,
      "loss": 0.3726,
      "step": 228
    },
    {
      "epoch": 2.194888178913738,
      "grad_norm": 0.4842613637447357,
      "learning_rate": 2.0158739290874822e-06,
      "loss": 0.3676,
      "step": 229
    },
    {
      "epoch": 2.2044728434504792,
      "grad_norm": 0.46521812677383423,
      "learning_rate": 1.971049780795901e-06,
      "loss": 0.3372,
      "step": 230
    },
    {
      "epoch": 2.2140575079872207,
      "grad_norm": 0.4897405505180359,
      "learning_rate": 1.9266069364176144e-06,
      "loss": 0.3273,
      "step": 231
    },
    {
      "epoch": 2.2236421725239617,
      "grad_norm": 0.4693354666233063,
      "learning_rate": 1.8825509907063328e-06,
      "loss": 0.3431,
      "step": 232
    },
    {
      "epoch": 2.2332268370607027,
      "grad_norm": 0.4614076018333435,
      "learning_rate": 1.838887489710452e-06,
      "loss": 0.3179,
      "step": 233
    },
    {
      "epoch": 2.242811501597444,
      "grad_norm": 0.4754553735256195,
      "learning_rate": 1.7956219300748796e-06,
      "loss": 0.3279,
      "step": 234
    },
    {
      "epoch": 2.252396166134185,
      "grad_norm": 0.5363587737083435,
      "learning_rate": 1.7527597583490825e-06,
      "loss": 0.3275,
      "step": 235
    },
    {
      "epoch": 2.2619808306709266,
      "grad_norm": 0.47843068838119507,
      "learning_rate": 1.7103063703014372e-06,
      "loss": 0.3547,
      "step": 236
    },
    {
      "epoch": 2.2715654952076676,
      "grad_norm": 0.5068241357803345,
      "learning_rate": 1.6682671102399806e-06,
      "loss": 0.3509,
      "step": 237
    },
    {
      "epoch": 2.281150159744409,
      "grad_norm": 0.5344985723495483,
      "learning_rate": 1.6266472703396286e-06,
      "loss": 0.3679,
      "step": 238
    },
    {
      "epoch": 2.29073482428115,
      "grad_norm": 0.5120568871498108,
      "learning_rate": 1.5854520899759656e-06,
      "loss": 0.3418,
      "step": 239
    },
    {
      "epoch": 2.3003194888178915,
      "grad_norm": 0.4886165261268616,
      "learning_rate": 1.544686755065677e-06,
      "loss": 0.344,
      "step": 240
    },
    {
      "epoch": 2.3099041533546325,
      "grad_norm": 0.4517747461795807,
      "learning_rate": 1.5043563974137132e-06,
      "loss": 0.3464,
      "step": 241
    },
    {
      "epoch": 2.319488817891374,
      "grad_norm": 0.47726428508758545,
      "learning_rate": 1.4644660940672628e-06,
      "loss": 0.3485,
      "step": 242
    },
    {
      "epoch": 2.329073482428115,
      "grad_norm": 0.5107493996620178,
      "learning_rate": 1.4250208666766235e-06,
      "loss": 0.3552,
      "step": 243
    },
    {
      "epoch": 2.3386581469648564,
      "grad_norm": 0.4823455810546875,
      "learning_rate": 1.3860256808630429e-06,
      "loss": 0.3247,
      "step": 244
    },
    {
      "epoch": 2.3482428115015974,
      "grad_norm": 0.46558883786201477,
      "learning_rate": 1.3474854455936126e-06,
      "loss": 0.3474,
      "step": 245
    },
    {
      "epoch": 2.357827476038339,
      "grad_norm": 0.46056708693504333,
      "learning_rate": 1.3094050125632973e-06,
      "loss": 0.3644,
      "step": 246
    },
    {
      "epoch": 2.36741214057508,
      "grad_norm": 0.4884093999862671,
      "learning_rate": 1.2717891755841722e-06,
      "loss": 0.3351,
      "step": 247
    },
    {
      "epoch": 2.376996805111821,
      "grad_norm": 0.4626671075820923,
      "learning_rate": 1.234642669981946e-06,
      "loss": 0.3292,
      "step": 248
    },
    {
      "epoch": 2.3865814696485623,
      "grad_norm": 0.4433649182319641,
      "learning_rate": 1.1979701719998454e-06,
      "loss": 0.3256,
      "step": 249
    },
    {
      "epoch": 2.3961661341853033,
      "grad_norm": 0.4477299749851227,
      "learning_rate": 1.1617762982099446e-06,
      "loss": 0.3307,
      "step": 250
    },
    {
      "epoch": 2.405750798722045,
      "grad_norm": 0.47525662183761597,
      "learning_rate": 1.1260656049319957e-06,
      "loss": 0.3394,
      "step": 251
    },
    {
      "epoch": 2.415335463258786,
      "grad_norm": 0.44378718733787537,
      "learning_rate": 1.0908425876598512e-06,
      "loss": 0.346,
      "step": 252
    },
    {
      "epoch": 2.4249201277955272,
      "grad_norm": 0.4790633022785187,
      "learning_rate": 1.0561116804955451e-06,
      "loss": 0.3408,
      "step": 253
    },
    {
      "epoch": 2.4345047923322682,
      "grad_norm": 0.5277284383773804,
      "learning_rate": 1.0218772555910955e-06,
      "loss": 0.3493,
      "step": 254
    },
    {
      "epoch": 2.4440894568690097,
      "grad_norm": 0.4934959411621094,
      "learning_rate": 9.881436225981107e-07,
      "loss": 0.3551,
      "step": 255
    },
    {
      "epoch": 2.4536741214057507,
      "grad_norm": 0.4702779948711395,
      "learning_rate": 9.549150281252633e-07,
      "loss": 0.3277,
      "step": 256
    },
    {
      "epoch": 2.463258785942492,
      "grad_norm": 0.4510821998119354,
      "learning_rate": 9.221956552036992e-07,
      "loss": 0.3291,
      "step": 257
    },
    {
      "epoch": 2.472843450479233,
      "grad_norm": 0.46118900179862976,
      "learning_rate": 8.899896227604509e-07,
      "loss": 0.3445,
      "step": 258
    },
    {
      "epoch": 2.4824281150159746,
      "grad_norm": 0.47450578212738037,
      "learning_rate": 8.58300985099918e-07,
      "loss": 0.3537,
      "step": 259
    },
    {
      "epoch": 2.4920127795527156,
      "grad_norm": 0.46595561504364014,
      "learning_rate": 8.271337313934869e-07,
      "loss": 0.3294,
      "step": 260
    },
    {
      "epoch": 2.501597444089457,
      "grad_norm": 0.4666687250137329,
      "learning_rate": 7.964917851773496e-07,
      "loss": 0.3487,
      "step": 261
    },
    {
      "epoch": 2.511182108626198,
      "grad_norm": 0.4660246670246124,
      "learning_rate": 7.663790038585794e-07,
      "loss": 0.3438,
      "step": 262
    },
    {
      "epoch": 2.520766773162939,
      "grad_norm": 0.4536135494709015,
      "learning_rate": 7.367991782295392e-07,
      "loss": 0.3251,
      "step": 263
    },
    {
      "epoch": 2.5303514376996805,
      "grad_norm": 0.44076839089393616,
      "learning_rate": 7.077560319906696e-07,
      "loss": 0.3295,
      "step": 264
    },
    {
      "epoch": 2.539936102236422,
      "grad_norm": 0.4548497200012207,
      "learning_rate": 6.792532212817271e-07,
      "loss": 0.3497,
      "step": 265
    },
    {
      "epoch": 2.549520766773163,
      "grad_norm": 0.44671207666397095,
      "learning_rate": 6.512943342215234e-07,
      "loss": 0.35,
      "step": 266
    },
    {
      "epoch": 2.559105431309904,
      "grad_norm": 0.4538073241710663,
      "learning_rate": 6.238828904562316e-07,
      "loss": 0.3604,
      "step": 267
    },
    {
      "epoch": 2.5686900958466454,
      "grad_norm": 0.4668172001838684,
      "learning_rate": 5.9702234071631e-07,
      "loss": 0.3341,
      "step": 268
    },
    {
      "epoch": 2.5782747603833864,
      "grad_norm": 0.4516941010951996,
      "learning_rate": 5.707160663821009e-07,
      "loss": 0.3148,
      "step": 269
    },
    {
      "epoch": 2.587859424920128,
      "grad_norm": 0.4613952338695526,
      "learning_rate": 5.449673790581611e-07,
      "loss": 0.3597,
      "step": 270
    },
    {
      "epoch": 2.597444089456869,
      "grad_norm": 0.44554054737091064,
      "learning_rate": 5.197795201563744e-07,
      "loss": 0.3133,
      "step": 271
    },
    {
      "epoch": 2.6070287539936103,
      "grad_norm": 0.46121156215667725,
      "learning_rate": 4.951556604879049e-07,
      "loss": 0.3429,
      "step": 272
    },
    {
      "epoch": 2.6166134185303513,
      "grad_norm": 0.42949506640434265,
      "learning_rate": 4.710988998640298e-07,
      "loss": 0.3393,
      "step": 273
    },
    {
      "epoch": 2.626198083067093,
      "grad_norm": 0.44105613231658936,
      "learning_rate": 4.4761226670592074e-07,
      "loss": 0.3511,
      "step": 274
    },
    {
      "epoch": 2.635782747603834,
      "grad_norm": 0.4923551082611084,
      "learning_rate": 4.2469871766340096e-07,
      "loss": 0.3198,
      "step": 275
    },
    {
      "epoch": 2.6453674121405752,
      "grad_norm": 0.446719229221344,
      "learning_rate": 4.0236113724274716e-07,
      "loss": 0.3277,
      "step": 276
    },
    {
      "epoch": 2.6549520766773163,
      "grad_norm": 0.47062206268310547,
      "learning_rate": 3.8060233744356634e-07,
      "loss": 0.3411,
      "step": 277
    },
    {
      "epoch": 2.6645367412140573,
      "grad_norm": 0.4742945432662964,
      "learning_rate": 3.5942505740480583e-07,
      "loss": 0.3333,
      "step": 278
    },
    {
      "epoch": 2.6741214057507987,
      "grad_norm": 0.42745864391326904,
      "learning_rate": 3.3883196305992906e-07,
      "loss": 0.3182,
      "step": 279
    },
    {
      "epoch": 2.68370607028754,
      "grad_norm": 0.42992714047431946,
      "learning_rate": 3.18825646801314e-07,
      "loss": 0.3404,
      "step": 280
    },
    {
      "epoch": 2.693290734824281,
      "grad_norm": 0.47815775871276855,
      "learning_rate": 2.9940862715390483e-07,
      "loss": 0.3304,
      "step": 281
    },
    {
      "epoch": 2.702875399361022,
      "grad_norm": 0.46748435497283936,
      "learning_rate": 2.8058334845816214e-07,
      "loss": 0.3445,
      "step": 282
    },
    {
      "epoch": 2.7124600638977636,
      "grad_norm": 0.4928760528564453,
      "learning_rate": 2.6235218056235633e-07,
      "loss": 0.3276,
      "step": 283
    },
    {
      "epoch": 2.722044728434505,
      "grad_norm": 0.45993271470069885,
      "learning_rate": 2.447174185242324e-07,
      "loss": 0.3232,
      "step": 284
    },
    {
      "epoch": 2.731629392971246,
      "grad_norm": 0.4380206763744354,
      "learning_rate": 2.276812823220964e-07,
      "loss": 0.3475,
      "step": 285
    },
    {
      "epoch": 2.741214057507987,
      "grad_norm": 0.4382314383983612,
      "learning_rate": 2.1124591657534776e-07,
      "loss": 0.3249,
      "step": 286
    },
    {
      "epoch": 2.7507987220447285,
      "grad_norm": 0.47202467918395996,
      "learning_rate": 1.9541339027450256e-07,
      "loss": 0.3558,
      "step": 287
    },
    {
      "epoch": 2.7603833865814695,
      "grad_norm": 0.43734198808670044,
      "learning_rate": 1.801856965207338e-07,
      "loss": 0.3324,
      "step": 288
    },
    {
      "epoch": 2.769968051118211,
      "grad_norm": 0.4321967363357544,
      "learning_rate": 1.6556475227496816e-07,
      "loss": 0.3542,
      "step": 289
    },
    {
      "epoch": 2.779552715654952,
      "grad_norm": 0.4536594748497009,
      "learning_rate": 1.5155239811656562e-07,
      "loss": 0.3242,
      "step": 290
    },
    {
      "epoch": 2.7891373801916934,
      "grad_norm": 0.42880719900131226,
      "learning_rate": 1.3815039801161723e-07,
      "loss": 0.3688,
      "step": 291
    },
    {
      "epoch": 2.7987220447284344,
      "grad_norm": 0.44557759165763855,
      "learning_rate": 1.253604390908819e-07,
      "loss": 0.334,
      "step": 292
    },
    {
      "epoch": 2.8083067092651754,
      "grad_norm": 0.43500256538391113,
      "learning_rate": 1.1318413143740436e-07,
      "loss": 0.3411,
      "step": 293
    },
    {
      "epoch": 2.817891373801917,
      "grad_norm": 0.45296376943588257,
      "learning_rate": 1.0162300788382263e-07,
      "loss": 0.3588,
      "step": 294
    },
    {
      "epoch": 2.8274760383386583,
      "grad_norm": 0.4909366965293884,
      "learning_rate": 9.0678523819408e-08,
      "loss": 0.3162,
      "step": 295
    },
    {
      "epoch": 2.8370607028753994,
      "grad_norm": 0.46786975860595703,
      "learning_rate": 8.035205700685167e-08,
      "loss": 0.3454,
      "step": 296
    },
    {
      "epoch": 2.8466453674121404,
      "grad_norm": 0.46253442764282227,
      "learning_rate": 7.064490740882057e-08,
      "loss": 0.346,
      "step": 297
    },
    {
      "epoch": 2.856230031948882,
      "grad_norm": 0.46637165546417236,
      "learning_rate": 6.15582970243117e-08,
      "loss": 0.3368,
      "step": 298
    },
    {
      "epoch": 2.8658146964856233,
      "grad_norm": 0.41861382126808167,
      "learning_rate": 5.3093369734816824e-08,
      "loss": 0.3043,
      "step": 299
    },
    {
      "epoch": 2.8753993610223643,
      "grad_norm": 0.4365438222885132,
      "learning_rate": 4.52511911603265e-08,
      "loss": 0.3222,
      "step": 300
    },
    {
      "epoch": 2.8849840255591053,
      "grad_norm": 0.4770229160785675,
      "learning_rate": 3.8032748525179684e-08,
      "loss": 0.3288,
      "step": 301
    },
    {
      "epoch": 2.8945686900958467,
      "grad_norm": 0.4438874423503876,
      "learning_rate": 3.143895053378698e-08,
      "loss": 0.3584,
      "step": 302
    },
    {
      "epoch": 2.9041533546325877,
      "grad_norm": 0.44970646500587463,
      "learning_rate": 2.547062725623828e-08,
      "loss": 0.3581,
      "step": 303
    },
    {
      "epoch": 2.913738019169329,
      "grad_norm": 0.4476597309112549,
      "learning_rate": 2.012853002380466e-08,
      "loss": 0.339,
      "step": 304
    },
    {
      "epoch": 2.92332268370607,
      "grad_norm": 0.43612033128738403,
      "learning_rate": 1.541333133436018e-08,
      "loss": 0.3492,
      "step": 305
    },
    {
      "epoch": 2.9329073482428116,
      "grad_norm": 0.4451361298561096,
      "learning_rate": 1.132562476771959e-08,
      "loss": 0.3398,
      "step": 306
    },
    {
      "epoch": 2.9424920127795526,
      "grad_norm": 0.4753524363040924,
      "learning_rate": 7.865924910916977e-09,
      "loss": 0.3356,
      "step": 307
    },
    {
      "epoch": 2.952076677316294,
      "grad_norm": 0.43977728486061096,
      "learning_rate": 5.034667293427053e-09,
      "loss": 0.3293,
      "step": 308
    },
    {
      "epoch": 2.961661341853035,
      "grad_norm": 0.43779218196868896,
      "learning_rate": 2.8322083323334417e-09,
      "loss": 0.3244,
      "step": 309
    },
    {
      "epoch": 2.9712460063897765,
      "grad_norm": 0.4322354197502136,
      "learning_rate": 1.2588252874673469e-09,
      "loss": 0.3568,
      "step": 310
    },
    {
      "epoch": 2.9808306709265175,
      "grad_norm": 0.44659778475761414,
      "learning_rate": 3.147162264971471e-10,
      "loss": 0.3667,
      "step": 311
    },
    {
      "epoch": 2.9904153354632586,
      "grad_norm": 0.4485153555870056,
      "learning_rate": 0.0,
      "loss": 0.3413,
      "step": 312
    },
    {
      "epoch": 2.9904153354632586,
      "step": 312,
      "total_flos": 2.1907068970603315e+17,
      "train_loss": 0.0,
      "train_runtime": 6.6479,
      "train_samples_per_second": 4512.734,
      "train_steps_per_second": 46.932
    }
  ],
  "logging_steps": 1,
  "max_steps": 312,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.1907068970603315e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}