{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.975629569455727,
  "eval_steps": 500,
  "global_step": 380,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.012997562956945572,
      "grad_norm": 6.099751379930322,
      "learning_rate": 2.105263157894737e-06,
      "loss": 1.0219,
      "step": 1
    },
    {
      "epoch": 0.025995125913891144,
      "grad_norm": 6.068929688233013,
      "learning_rate": 4.210526315789474e-06,
      "loss": 1.0148,
      "step": 2
    },
    {
      "epoch": 0.03899268887083672,
      "grad_norm": 5.607482856386817,
      "learning_rate": 6.31578947368421e-06,
      "loss": 0.9978,
      "step": 3
    },
    {
      "epoch": 0.05199025182778229,
      "grad_norm": 4.0344324115028956,
      "learning_rate": 8.421052631578948e-06,
      "loss": 0.9695,
      "step": 4
    },
    {
      "epoch": 0.06498781478472786,
      "grad_norm": 2.3652828088294218,
      "learning_rate": 1.0526315789473684e-05,
      "loss": 0.9321,
      "step": 5
    },
    {
      "epoch": 0.07798537774167344,
      "grad_norm": 4.801199261631304,
      "learning_rate": 1.263157894736842e-05,
      "loss": 0.9538,
      "step": 6
    },
    {
      "epoch": 0.09098294069861901,
      "grad_norm": 4.831210221526376,
      "learning_rate": 1.4736842105263159e-05,
      "loss": 0.9455,
      "step": 7
    },
    {
      "epoch": 0.10398050365556458,
      "grad_norm": 5.705571120150557,
      "learning_rate": 1.6842105263157896e-05,
      "loss": 0.9346,
      "step": 8
    },
    {
      "epoch": 0.11697806661251016,
      "grad_norm": 4.118034247132142,
      "learning_rate": 1.894736842105263e-05,
      "loss": 0.9103,
      "step": 9
    },
    {
      "epoch": 0.12997562956945571,
      "grad_norm": 2.7464416924594333,
      "learning_rate": 2.105263157894737e-05,
      "loss": 0.8666,
      "step": 10
    },
    {
      "epoch": 0.1429731925264013,
      "grad_norm": 1.8112884281407549,
      "learning_rate": 2.3157894736842107e-05,
      "loss": 0.8288,
      "step": 11
    },
    {
      "epoch": 0.15597075548334688,
      "grad_norm": 1.6168217780438021,
      "learning_rate": 2.526315789473684e-05,
      "loss": 0.8095,
      "step": 12
    },
    {
      "epoch": 0.16896831844029245,
      "grad_norm": 1.0800910213719421,
      "learning_rate": 2.7368421052631583e-05,
      "loss": 0.7876,
      "step": 13
    },
    {
      "epoch": 0.18196588139723802,
      "grad_norm": 1.273865117132841,
      "learning_rate": 2.9473684210526317e-05,
      "loss": 0.7801,
      "step": 14
    },
    {
      "epoch": 0.19496344435418358,
      "grad_norm": 1.4101116134429739,
      "learning_rate": 3.157894736842106e-05,
      "loss": 0.7769,
      "step": 15
    },
    {
      "epoch": 0.20796100731112915,
      "grad_norm": 1.3300011416464073,
      "learning_rate": 3.368421052631579e-05,
      "loss": 0.7705,
      "step": 16
    },
    {
      "epoch": 0.22095857026807472,
      "grad_norm": 1.2303404991910958,
      "learning_rate": 3.578947368421053e-05,
      "loss": 0.7457,
      "step": 17
    },
    {
      "epoch": 0.23395613322502032,
      "grad_norm": 1.2600947996998637,
      "learning_rate": 3.789473684210526e-05,
      "loss": 0.7414,
      "step": 18
    },
    {
      "epoch": 0.2469536961819659,
      "grad_norm": 0.9415351295551974,
      "learning_rate": 4e-05,
      "loss": 0.731,
      "step": 19
    },
    {
      "epoch": 0.25995125913891143,
      "grad_norm": 1.6378942120476991,
      "learning_rate": 4.210526315789474e-05,
      "loss": 0.7402,
      "step": 20
    },
    {
      "epoch": 0.272948822095857,
      "grad_norm": 1.452156312513347,
      "learning_rate": 4.421052631578948e-05,
      "loss": 0.7293,
      "step": 21
    },
    {
      "epoch": 0.2859463850528026,
      "grad_norm": 1.1024110144705295,
      "learning_rate": 4.6315789473684214e-05,
      "loss": 0.7143,
      "step": 22
    },
    {
      "epoch": 0.29894394800974816,
      "grad_norm": 1.9887153689227788,
      "learning_rate": 4.842105263157895e-05,
      "loss": 0.7255,
      "step": 23
    },
    {
      "epoch": 0.31194151096669376,
      "grad_norm": 1.1903531007119599,
      "learning_rate": 5.052631578947368e-05,
      "loss": 0.7177,
      "step": 24
    },
    {
      "epoch": 0.3249390739236393,
      "grad_norm": 1.9904927095675595,
      "learning_rate": 5.263157894736843e-05,
      "loss": 0.7215,
      "step": 25
    },
    {
      "epoch": 0.3379366368805849,
      "grad_norm": 1.6681859777545944,
      "learning_rate": 5.4736842105263165e-05,
      "loss": 0.7009,
      "step": 26
    },
    {
      "epoch": 0.35093419983753044,
      "grad_norm": 1.6209941695574477,
      "learning_rate": 5.68421052631579e-05,
      "loss": 0.7052,
      "step": 27
    },
    {
      "epoch": 0.36393176279447603,
      "grad_norm": 1.98849797370751,
      "learning_rate": 5.8947368421052634e-05,
      "loss": 0.7004,
      "step": 28
    },
    {
      "epoch": 0.37692932575142163,
      "grad_norm": 1.12064775343047,
      "learning_rate": 6.105263157894738e-05,
      "loss": 0.6898,
      "step": 29
    },
    {
      "epoch": 0.38992688870836717,
      "grad_norm": 2.2171415848887475,
      "learning_rate": 6.315789473684212e-05,
      "loss": 0.6866,
      "step": 30
    },
    {
      "epoch": 0.40292445166531277,
      "grad_norm": 1.5015021280904342,
      "learning_rate": 6.526315789473685e-05,
      "loss": 0.6899,
      "step": 31
    },
    {
      "epoch": 0.4159220146222583,
      "grad_norm": 2.1943095640217583,
      "learning_rate": 6.736842105263159e-05,
      "loss": 0.6852,
      "step": 32
    },
    {
      "epoch": 0.4289195775792039,
      "grad_norm": 1.7052014979491807,
      "learning_rate": 6.947368421052632e-05,
      "loss": 0.6898,
      "step": 33
    },
    {
      "epoch": 0.44191714053614944,
      "grad_norm": 1.4787351390255603,
      "learning_rate": 7.157894736842105e-05,
      "loss": 0.6812,
      "step": 34
    },
    {
      "epoch": 0.45491470349309504,
      "grad_norm": 1.1982328674577414,
      "learning_rate": 7.368421052631579e-05,
      "loss": 0.6827,
      "step": 35
    },
    {
      "epoch": 0.46791226645004064,
      "grad_norm": 2.055421861681658,
      "learning_rate": 7.578947368421052e-05,
      "loss": 0.6747,
      "step": 36
    },
    {
      "epoch": 0.4809098294069862,
      "grad_norm": 1.3504861459840583,
      "learning_rate": 7.789473684210527e-05,
      "loss": 0.6697,
      "step": 37
    },
    {
      "epoch": 0.4939073923639318,
      "grad_norm": 1.9872016361599776,
      "learning_rate": 8e-05,
      "loss": 0.6748,
      "step": 38
    },
    {
      "epoch": 0.5069049553208773,
      "grad_norm": 1.9190088069026263,
      "learning_rate": 7.99983123807325e-05,
      "loss": 0.6721,
      "step": 39
    },
    {
      "epoch": 0.5199025182778229,
      "grad_norm": 1.265154939225945,
      "learning_rate": 7.999324966533291e-05,
      "loss": 0.6603,
      "step": 40
    },
    {
      "epoch": 0.5329000812347685,
      "grad_norm": 3.2164002414551978,
      "learning_rate": 7.998481228099806e-05,
      "loss": 0.6723,
      "step": 41
    },
    {
      "epoch": 0.545897644191714,
      "grad_norm": 2.571438671486186,
      "learning_rate": 7.997300093968255e-05,
      "loss": 0.6806,
      "step": 42
    },
    {
      "epoch": 0.5588952071486596,
      "grad_norm": 1.8156950022852132,
      "learning_rate": 7.995781663803876e-05,
      "loss": 0.6635,
      "step": 43
    },
    {
      "epoch": 0.5718927701056052,
      "grad_norm": 1.3254069741419403,
      "learning_rate": 7.993926065733265e-05,
      "loss": 0.6637,
      "step": 44
    },
    {
      "epoch": 0.5848903330625508,
      "grad_norm": 0.9530737655919242,
      "learning_rate": 7.991733456333579e-05,
      "loss": 0.6636,
      "step": 45
    },
    {
      "epoch": 0.5978878960194963,
      "grad_norm": 1.3664556141065045,
      "learning_rate": 7.98920402061931e-05,
      "loss": 0.6616,
      "step": 46
    },
    {
      "epoch": 0.6108854589764419,
      "grad_norm": 1.2839159067727302,
      "learning_rate": 7.98633797202668e-05,
      "loss": 0.6656,
      "step": 47
    },
    {
      "epoch": 0.6238830219333875,
      "grad_norm": 1.1439614314511357,
      "learning_rate": 7.98313555239563e-05,
      "loss": 0.6639,
      "step": 48
    },
    {
      "epoch": 0.6368805848903331,
      "grad_norm": 1.8097099672517978,
      "learning_rate": 7.979597031949415e-05,
      "loss": 0.6788,
      "step": 49
    },
    {
      "epoch": 0.6498781478472786,
      "grad_norm": 1.0800803816613773,
      "learning_rate": 7.975722709271799e-05,
      "loss": 0.6578,
      "step": 50
    },
    {
      "epoch": 0.6628757108042242,
      "grad_norm": 2.014427030879868,
      "learning_rate": 7.97151291128186e-05,
      "loss": 0.664,
      "step": 51
    },
    {
      "epoch": 0.6758732737611698,
      "grad_norm": 1.5251902643366386,
      "learning_rate": 7.96696799320641e-05,
      "loss": 0.6617,
      "step": 52
    },
    {
      "epoch": 0.6888708367181153,
      "grad_norm": 1.4609799533681953,
      "learning_rate": 7.962088338550013e-05,
      "loss": 0.6544,
      "step": 53
    },
    {
      "epoch": 0.7018683996750609,
      "grad_norm": 1.2857979651137834,
      "learning_rate": 7.956874359062632e-05,
      "loss": 0.6563,
      "step": 54
    },
    {
      "epoch": 0.7148659626320065,
      "grad_norm": 1.167851324111819,
      "learning_rate": 7.951326494704878e-05,
      "loss": 0.6432,
      "step": 55
    },
    {
      "epoch": 0.7278635255889521,
      "grad_norm": 0.923367616793914,
      "learning_rate": 7.94544521361089e-05,
      "loss": 0.6458,
      "step": 56
    },
    {
      "epoch": 0.7408610885458976,
      "grad_norm": 1.2536213795566458,
      "learning_rate": 7.939231012048833e-05,
      "loss": 0.6483,
      "step": 57
    },
    {
      "epoch": 0.7538586515028433,
      "grad_norm": 1.2119787729723712,
      "learning_rate": 7.932684414379021e-05,
      "loss": 0.6461,
      "step": 58
    },
    {
      "epoch": 0.7668562144597888,
      "grad_norm": 0.7624960645952309,
      "learning_rate": 7.925805973009672e-05,
      "loss": 0.6347,
      "step": 59
    },
    {
      "epoch": 0.7798537774167343,
      "grad_norm": 0.9917480604742027,
      "learning_rate": 7.918596268350296e-05,
      "loss": 0.639,
      "step": 60
    },
    {
      "epoch": 0.7928513403736799,
      "grad_norm": 1.3671397849973843,
      "learning_rate": 7.911055908762718e-05,
      "loss": 0.6416,
      "step": 61
    },
    {
      "epoch": 0.8058489033306255,
      "grad_norm": 0.7823232773420852,
      "learning_rate": 7.903185530509743e-05,
      "loss": 0.633,
      "step": 62
    },
    {
      "epoch": 0.8188464662875711,
      "grad_norm": 0.8711197066128451,
      "learning_rate": 7.894985797701472e-05,
      "loss": 0.6348,
      "step": 63
    },
    {
      "epoch": 0.8318440292445166,
      "grad_norm": 1.0138573543907548,
      "learning_rate": 7.886457402239256e-05,
      "loss": 0.6317,
      "step": 64
    },
    {
      "epoch": 0.8448415922014623,
      "grad_norm": 1.328902077693916,
      "learning_rate": 7.877601063757323e-05,
      "loss": 0.641,
      "step": 65
    },
    {
      "epoch": 0.8578391551584078,
      "grad_norm": 0.8975115888522265,
      "learning_rate": 7.868417529562043e-05,
      "loss": 0.6225,
      "step": 66
    },
    {
      "epoch": 0.8708367181153533,
      "grad_norm": 0.9433279422872343,
      "learning_rate": 7.858907574568882e-05,
      "loss": 0.6318,
      "step": 67
    },
    {
      "epoch": 0.8838342810722989,
      "grad_norm": 0.9051780877745009,
      "learning_rate": 7.849072001237001e-05,
      "loss": 0.623,
      "step": 68
    },
    {
      "epoch": 0.8968318440292445,
      "grad_norm": 1.1238293364288126,
      "learning_rate": 7.838911639501557e-05,
      "loss": 0.6197,
      "step": 69
    },
    {
      "epoch": 0.9098294069861901,
      "grad_norm": 0.9616875737950008,
      "learning_rate": 7.828427346703657e-05,
      "loss": 0.6151,
      "step": 70
    },
    {
      "epoch": 0.9228269699431356,
      "grad_norm": 0.7925602555435675,
      "learning_rate": 7.81762000751803e-05,
      "loss": 0.6204,
      "step": 71
    },
    {
      "epoch": 0.9358245329000813,
      "grad_norm": 0.8202368746016689,
      "learning_rate": 7.806490533878368e-05,
      "loss": 0.6206,
      "step": 72
    },
    {
      "epoch": 0.9488220958570268,
      "grad_norm": 0.7281381020297469,
      "learning_rate": 7.795039864900378e-05,
      "loss": 0.6145,
      "step": 73
    },
    {
      "epoch": 0.9618196588139724,
      "grad_norm": 0.951203457585769,
      "learning_rate": 7.783268966802539e-05,
      "loss": 0.6271,
      "step": 74
    },
    {
      "epoch": 0.974817221770918,
      "grad_norm": 1.079648600019301,
      "learning_rate": 7.771178832824573e-05,
      "loss": 0.6142,
      "step": 75
    },
    {
      "epoch": 0.9878147847278635,
      "grad_norm": 0.7728669124672748,
      "learning_rate": 7.758770483143634e-05,
      "loss": 0.6123,
      "step": 76
    },
    {
      "epoch": 1.0024370430544274,
      "grad_norm": 0.9574347690194315,
      "learning_rate": 7.74604496478822e-05,
      "loss": 0.615,
      "step": 77
    },
    {
      "epoch": 1.0154346060113728,
      "grad_norm": 1.0135972450552604,
      "learning_rate": 7.733003351549829e-05,
      "loss": 0.5888,
      "step": 78
    },
    {
      "epoch": 1.0284321689683185,
      "grad_norm": 1.1758211244717183,
      "learning_rate": 7.719646743892352e-05,
      "loss": 0.5932,
      "step": 79
    },
    {
      "epoch": 1.0414297319252641,
      "grad_norm": 1.0448938050043781,
      "learning_rate": 7.705976268859207e-05,
      "loss": 0.5934,
      "step": 80
    },
    {
      "epoch": 1.0544272948822095,
      "grad_norm": 0.9104488344215643,
      "learning_rate": 7.691993079978252e-05,
      "loss": 0.5815,
      "step": 81
    },
    {
      "epoch": 1.0674248578391552,
      "grad_norm": 0.7412780882809253,
      "learning_rate": 7.677698357164431e-05,
      "loss": 0.5752,
      "step": 82
    },
    {
      "epoch": 1.0804224207961006,
      "grad_norm": 0.7417024796170562,
      "learning_rate": 7.663093306620231e-05,
      "loss": 0.5792,
      "step": 83
    },
    {
      "epoch": 1.0934199837530463,
      "grad_norm": 0.7190010808453768,
      "learning_rate": 7.648179160733883e-05,
      "loss": 0.5753,
      "step": 84
    },
    {
      "epoch": 1.106417546709992,
      "grad_norm": 0.7427027225824168,
      "learning_rate": 7.632957177975387e-05,
      "loss": 0.5777,
      "step": 85
    },
    {
      "epoch": 1.1194151096669374,
      "grad_norm": 0.9295269491982405,
      "learning_rate": 7.61742864279031e-05,
      "loss": 0.5834,
      "step": 86
    },
    {
      "epoch": 1.132412672623883,
      "grad_norm": 0.9634417055121992,
      "learning_rate": 7.601594865491414e-05,
      "loss": 0.5742,
      "step": 87
    },
    {
      "epoch": 1.1454102355808287,
      "grad_norm": 0.9476309894224839,
      "learning_rate": 7.585457182148081e-05,
      "loss": 0.5784,
      "step": 88
    },
    {
      "epoch": 1.158407798537774,
      "grad_norm": 0.9101385250862156,
      "learning_rate": 7.569016954473577e-05,
      "loss": 0.5826,
      "step": 89
    },
    {
      "epoch": 1.1714053614947197,
      "grad_norm": 0.9143144480170803,
      "learning_rate": 7.552275569710152e-05,
      "loss": 0.5852,
      "step": 90
    },
    {
      "epoch": 1.1844029244516654,
      "grad_norm": 0.6885600525748597,
      "learning_rate": 7.535234440511979e-05,
      "loss": 0.5763,
      "step": 91
    },
    {
      "epoch": 1.1974004874086108,
      "grad_norm": 0.5214486371121746,
      "learning_rate": 7.517895004825956e-05,
      "loss": 0.5787,
      "step": 92
    },
    {
      "epoch": 1.2103980503655565,
      "grad_norm": 0.758408482295764,
      "learning_rate": 7.500258725770375e-05,
      "loss": 0.5746,
      "step": 93
    },
    {
      "epoch": 1.2233956133225021,
      "grad_norm": 0.7125338265833929,
      "learning_rate": 7.48232709151145e-05,
      "loss": 0.5703,
      "step": 94
    },
    {
      "epoch": 1.2363931762794476,
      "grad_norm": 0.6377296028835163,
      "learning_rate": 7.464101615137756e-05,
      "loss": 0.579,
      "step": 95
    },
    {
      "epoch": 1.2493907392363932,
      "grad_norm": 0.5870416970514215,
      "learning_rate": 7.445583834532546e-05,
      "loss": 0.5637,
      "step": 96
    },
    {
      "epoch": 1.2623883021933389,
      "grad_norm": 0.4860633167448116,
      "learning_rate": 7.426775312243986e-05,
      "loss": 0.5756,
      "step": 97
    },
    {
      "epoch": 1.2753858651502843,
      "grad_norm": 0.5083572483496955,
      "learning_rate": 7.407677635353308e-05,
      "loss": 0.5734,
      "step": 98
    },
    {
      "epoch": 1.28838342810723,
      "grad_norm": 0.5305435155519614,
      "learning_rate": 7.388292415340888e-05,
      "loss": 0.5682,
      "step": 99
    },
    {
      "epoch": 1.3013809910641756,
      "grad_norm": 0.4176759940201937,
      "learning_rate": 7.368621287950264e-05,
      "loss": 0.5643,
      "step": 100
    },
    {
      "epoch": 1.314378554021121,
      "grad_norm": 0.4583116745957642,
      "learning_rate": 7.348665913050115e-05,
      "loss": 0.5602,
      "step": 101
    },
    {
      "epoch": 1.3273761169780667,
      "grad_norm": 0.5426624427041704,
      "learning_rate": 7.328427974494201e-05,
      "loss": 0.5672,
      "step": 102
    },
    {
      "epoch": 1.340373679935012,
      "grad_norm": 0.4189086182858418,
      "learning_rate": 7.307909179979274e-05,
      "loss": 0.5648,
      "step": 103
    },
    {
      "epoch": 1.3533712428919578,
      "grad_norm": 0.39949243757403974,
      "learning_rate": 7.28711126090098e-05,
      "loss": 0.566,
      "step": 104
    },
    {
      "epoch": 1.3663688058489034,
      "grad_norm": 0.43131027290320334,
      "learning_rate": 7.266035972207773e-05,
      "loss": 0.5571,
      "step": 105
    },
    {
      "epoch": 1.3793663688058488,
      "grad_norm": 0.40582965588538134,
      "learning_rate": 7.24468509225281e-05,
      "loss": 0.5693,
      "step": 106
    },
    {
      "epoch": 1.3923639317627945,
      "grad_norm": 0.4761372341291519,
      "learning_rate": 7.223060422643914e-05,
      "loss": 0.5686,
      "step": 107
    },
    {
      "epoch": 1.40536149471974,
      "grad_norm": 0.5727339140771481,
      "learning_rate": 7.201163788091536e-05,
      "loss": 0.5678,
      "step": 108
    },
    {
      "epoch": 1.4183590576766856,
      "grad_norm": 0.7085163969661245,
      "learning_rate": 7.178997036254799e-05,
      "loss": 0.5683,
      "step": 109
    },
    {
      "epoch": 1.4313566206336312,
      "grad_norm": 0.80819559722067,
      "learning_rate": 7.156562037585576e-05,
      "loss": 0.5665,
      "step": 110
    },
    {
      "epoch": 1.4443541835905767,
      "grad_norm": 0.793253904366816,
      "learning_rate": 7.133860685170665e-05,
      "loss": 0.5745,
      "step": 111
    },
    {
      "epoch": 1.4573517465475223,
      "grad_norm": 0.7256059178824779,
      "learning_rate": 7.110894894572056e-05,
      "loss": 0.5645,
      "step": 112
    },
    {
      "epoch": 1.470349309504468,
      "grad_norm": 0.5014971965016265,
      "learning_rate": 7.087666603665284e-05,
      "loss": 0.5606,
      "step": 113
    },
    {
      "epoch": 1.4833468724614134,
      "grad_norm": 0.43914912895871583,
      "learning_rate": 7.064177772475912e-05,
      "loss": 0.5674,
      "step": 114
    },
    {
      "epoch": 1.496344435418359,
      "grad_norm": 0.4961335507396086,
      "learning_rate": 7.040430383014146e-05,
      "loss": 0.5655,
      "step": 115
    },
    {
      "epoch": 1.5093419983753047,
      "grad_norm": 0.5185842695541142,
      "learning_rate": 7.016426439107586e-05,
      "loss": 0.5653,
      "step": 116
    },
    {
      "epoch": 1.5223395613322501,
      "grad_norm": 0.37245930997617116,
      "learning_rate": 6.992167966232143e-05,
      "loss": 0.5567,
      "step": 117
    },
    {
      "epoch": 1.5353371242891958,
      "grad_norm": 0.5310947508784376,
      "learning_rate": 6.967657011341126e-05,
      "loss": 0.575,
      "step": 118
    },
    {
      "epoch": 1.5483346872461414,
      "grad_norm": 0.7628101414612303,
      "learning_rate": 6.942895642692527e-05,
      "loss": 0.5624,
      "step": 119
    },
    {
      "epoch": 1.5613322502030869,
      "grad_norm": 0.7910046561291942,
      "learning_rate": 6.917885949674483e-05,
      "loss": 0.5667,
      "step": 120
    },
    {
      "epoch": 1.5743298131600325,
      "grad_norm": 0.7246609366699134,
      "learning_rate": 6.892630042628988e-05,
      "loss": 0.557,
      "step": 121
    },
    {
      "epoch": 1.5873273761169782,
      "grad_norm": 0.6852631021753094,
      "learning_rate": 6.867130052673806e-05,
      "loss": 0.5638,
      "step": 122
    },
    {
      "epoch": 1.6003249390739236,
      "grad_norm": 0.6947100326982237,
      "learning_rate": 6.841388131522656e-05,
      "loss": 0.5626,
      "step": 123
    },
    {
      "epoch": 1.6133225020308692,
      "grad_norm": 0.5891017692387462,
      "learning_rate": 6.815406451303647e-05,
      "loss": 0.5632,
      "step": 124
    },
    {
      "epoch": 1.626320064987815,
      "grad_norm": 0.44282420651875987,
      "learning_rate": 6.789187204375981e-05,
      "loss": 0.5643,
      "step": 125
    },
    {
      "epoch": 1.6393176279447603,
      "grad_norm": 0.3540188196234301,
      "learning_rate": 6.762732603144978e-05,
      "loss": 0.5611,
      "step": 126
    },
    {
      "epoch": 1.652315190901706,
      "grad_norm": 0.38606767224591043,
      "learning_rate": 6.736044879875373e-05,
      "loss": 0.5548,
      "step": 127
    },
    {
      "epoch": 1.6653127538586516,
      "grad_norm": 0.4847276958422751,
      "learning_rate": 6.709126286502965e-05,
      "loss": 0.5634,
      "step": 128
    },
    {
      "epoch": 1.678310316815597,
      "grad_norm": 0.48098159010027874,
      "learning_rate": 6.681979094444596e-05,
      "loss": 0.558,
      "step": 129
    },
    {
      "epoch": 1.6913078797725425,
      "grad_norm": 0.35223251057931265,
      "learning_rate": 6.654605594406486e-05,
      "loss": 0.5584,
      "step": 130
    },
    {
      "epoch": 1.7043054427294884,
      "grad_norm": 0.3582278064607583,
      "learning_rate": 6.627008096190938e-05,
      "loss": 0.5581,
      "step": 131
    },
    {
      "epoch": 1.7173030056864338,
      "grad_norm": 0.4148482135356544,
      "learning_rate": 6.59918892850144e-05,
      "loss": 0.5604,
      "step": 132
    },
    {
      "epoch": 1.7303005686433792,
      "grad_norm": 0.5160390925469622,
      "learning_rate": 6.571150438746157e-05,
      "loss": 0.5578,
      "step": 133
    },
    {
      "epoch": 1.743298131600325,
      "grad_norm": 0.6691571622471794,
      "learning_rate": 6.542894992839873e-05,
      "loss": 0.5521,
      "step": 134
    },
    {
      "epoch": 1.7562956945572705,
      "grad_norm": 0.7398206439178091,
      "learning_rate": 6.514424975004329e-05,
      "loss": 0.5559,
      "step": 135
    },
    {
      "epoch": 1.769293257514216,
      "grad_norm": 0.6904604759405913,
      "learning_rate": 6.48574278756706e-05,
      "loss": 0.556,
      "step": 136
    },
    {
      "epoch": 1.7822908204711616,
      "grad_norm": 0.5927997707542059,
      "learning_rate": 6.456850850758673e-05,
      "loss": 0.5511,
      "step": 137
    },
    {
      "epoch": 1.7952883834281073,
      "grad_norm": 0.598576202040419,
      "learning_rate": 6.427751602508628e-05,
      "loss": 0.5517,
      "step": 138
    },
    {
      "epoch": 1.8082859463850527,
      "grad_norm": 0.5752299054086354,
      "learning_rate": 6.398447498239527e-05,
      "loss": 0.5603,
      "step": 139
    },
    {
      "epoch": 1.8212835093419983,
      "grad_norm": 0.47947265005962353,
      "learning_rate": 6.368941010659921e-05,
      "loss": 0.5583,
      "step": 140
    },
    {
      "epoch": 1.834281072298944,
      "grad_norm": 0.3589925554258543,
      "learning_rate": 6.339234629555655e-05,
      "loss": 0.5566,
      "step": 141
    },
    {
      "epoch": 1.8472786352558894,
      "grad_norm": 0.3418394937521007,
      "learning_rate": 6.309330861579786e-05,
      "loss": 0.5592,
      "step": 142
    },
    {
      "epoch": 1.860276198212835,
      "grad_norm": 0.4539321991211633,
      "learning_rate": 6.279232230041065e-05,
      "loss": 0.5544,
      "step": 143
    },
    {
      "epoch": 1.8732737611697807,
      "grad_norm": 0.44216438200356556,
      "learning_rate": 6.248941274691017e-05,
      "loss": 0.5519,
      "step": 144
    },
    {
      "epoch": 1.8862713241267262,
      "grad_norm": 0.3237161862715092,
      "learning_rate": 6.218460551509636e-05,
      "loss": 0.5471,
      "step": 145
    },
    {
      "epoch": 1.8992688870836718,
      "grad_norm": 0.3051484675988478,
      "learning_rate": 6.18779263248971e-05,
      "loss": 0.5551,
      "step": 146
    },
    {
      "epoch": 1.9122664500406175,
      "grad_norm": 0.459140656732003,
      "learning_rate": 6.156940105419785e-05,
      "loss": 0.5491,
      "step": 147
    },
    {
      "epoch": 1.925264012997563,
      "grad_norm": 0.5335980534072164,
      "learning_rate": 6.125905573665824e-05,
      "loss": 0.5515,
      "step": 148
    },
    {
      "epoch": 1.9382615759545085,
      "grad_norm": 0.4199748973939331,
      "learning_rate": 6.094691655951512e-05,
      "loss": 0.554,
      "step": 149
    },
    {
      "epoch": 1.9512591389114542,
      "grad_norm": 0.37821939152611067,
      "learning_rate": 6.063300986137297e-05,
      "loss": 0.5515,
      "step": 150
    },
|
    {
      "epoch": 1.9642567018683996,
      "grad_norm": 0.4525799105101174,
      "learning_rate": 6.0317362129981375e-05,
      "loss": 0.5514,
      "step": 151
    },
    {
      "epoch": 1.9772542648253453,
      "grad_norm": 0.4866209265488796,
      "learning_rate": 6.000000000000001e-05,
      "loss": 0.5557,
      "step": 152
    },
    {
      "epoch": 1.990251827782291,
      "grad_norm": 0.4881518290915675,
      "learning_rate": 5.968095025075114e-05,
      "loss": 0.5581,
      "step": 153
    },
    {
      "epoch": 2.0048740861088548,
      "grad_norm": 0.3855836959947548,
      "learning_rate": 5.936023980395997e-05,
      "loss": 0.5411,
      "step": 154
    },
    {
      "epoch": 2.0178716490658,
      "grad_norm": 0.3731193378171015,
      "learning_rate": 5.903789572148295e-05,
      "loss": 0.5115,
      "step": 155
    },
    {
      "epoch": 2.0308692120227456,
      "grad_norm": 0.4760587483033507,
      "learning_rate": 5.871394520302432e-05,
      "loss": 0.5036,
      "step": 156
    },
    {
      "epoch": 2.0438667749796915,
      "grad_norm": 0.5788173180313999,
      "learning_rate": 5.838841558384091e-05,
      "loss": 0.5023,
      "step": 157
    },
    {
      "epoch": 2.056864337936637,
      "grad_norm": 0.610294677890403,
      "learning_rate": 5.806133433243558e-05,
      "loss": 0.5115,
      "step": 158
    },
    {
      "epoch": 2.0698619008935824,
      "grad_norm": 0.5785376971559186,
      "learning_rate": 5.7732729048239444e-05,
      "loss": 0.5059,
      "step": 159
    },
    {
      "epoch": 2.0828594638505282,
      "grad_norm": 0.6361199902732823,
      "learning_rate": 5.740262745928293e-05,
      "loss": 0.5094,
      "step": 160
    },
    {
      "epoch": 2.0958570268074737,
      "grad_norm": 0.7055783417389399,
      "learning_rate": 5.707105741985615e-05,
      "loss": 0.5099,
      "step": 161
    },
    {
      "epoch": 2.108854589764419,
      "grad_norm": 0.7551674489031295,
      "learning_rate": 5.673804690815845e-05,
      "loss": 0.5115,
      "step": 162
    },
    {
      "epoch": 2.121852152721365,
      "grad_norm": 0.7239995903013289,
      "learning_rate": 5.6403624023937614e-05,
      "loss": 0.5031,
      "step": 163
    },
    {
      "epoch": 2.1348497156783104,
      "grad_norm": 0.509463011523919,
      "learning_rate": 5.606781698611879e-05,
      "loss": 0.5021,
      "step": 164
    },
    {
      "epoch": 2.147847278635256,
      "grad_norm": 0.4210575883324648,
      "learning_rate": 5.573065413042333e-05,
      "loss": 0.5053,
      "step": 165
    },
    {
      "epoch": 2.1608448415922012,
      "grad_norm": 0.5009273026595834,
      "learning_rate": 5.5392163906977835e-05,
      "loss": 0.507,
      "step": 166
    },
    {
      "epoch": 2.173842404549147,
      "grad_norm": 0.47375681034352735,
      "learning_rate": 5.505237487791343e-05,
      "loss": 0.5042,
      "step": 167
    },
    {
      "epoch": 2.1868399675060926,
      "grad_norm": 0.37667913409010934,
      "learning_rate": 5.471131571495574e-05,
      "loss": 0.5006,
      "step": 168
    },
    {
      "epoch": 2.199837530463038,
      "grad_norm": 0.292819297582803,
      "learning_rate": 5.4369015197005506e-05,
      "loss": 0.5102,
      "step": 169
    },
    {
      "epoch": 2.212835093419984,
      "grad_norm": 0.4148005226049645,
      "learning_rate": 5.4025502207710184e-05,
      "loss": 0.5061,
      "step": 170
    },
    {
      "epoch": 2.2258326563769293,
      "grad_norm": 0.34689504337022264,
      "learning_rate": 5.368080573302676e-05,
      "loss": 0.5016,
      "step": 171
    },
    {
      "epoch": 2.2388302193338747,
      "grad_norm": 0.3094307656424445,
      "learning_rate": 5.333495485877583e-05,
      "loss": 0.5014,
      "step": 172
    },
    {
      "epoch": 2.2518277822908206,
      "grad_norm": 0.3739855090857637,
      "learning_rate": 5.298797876818735e-05,
      "loss": 0.501,
      "step": 173
    },
    {
      "epoch": 2.264825345247766,
      "grad_norm": 0.3427237300205951,
      "learning_rate": 5.263990673943811e-05,
      "loss": 0.5022,
      "step": 174
    },
    {
      "epoch": 2.2778229082047114,
      "grad_norm": 0.2853871717746141,
      "learning_rate": 5.229076814318122e-05,
      "loss": 0.5024,
      "step": 175
    },
    {
      "epoch": 2.2908204711616573,
      "grad_norm": 0.30137576187728127,
      "learning_rate": 5.194059244006779e-05,
      "loss": 0.4956,
      "step": 176
    },
    {
      "epoch": 2.3038180341186028,
      "grad_norm": 0.34898425966594765,
      "learning_rate": 5.158940917826099e-05,
      "loss": 0.5048,
      "step": 177
    },
    {
      "epoch": 2.316815597075548,
      "grad_norm": 0.3375764565690276,
      "learning_rate": 5.123724799094279e-05,
      "loss": 0.4971,
      "step": 178
    },
    {
      "epoch": 2.329813160032494,
      "grad_norm": 0.3526352081558193,
      "learning_rate": 5.088413859381341e-05,
      "loss": 0.4953,
      "step": 179
    },
    {
      "epoch": 2.3428107229894395,
      "grad_norm": 0.2669067122173656,
      "learning_rate": 5.053011078258397e-05,
      "loss": 0.4988,
      "step": 180
    },
    {
      "epoch": 2.355808285946385,
      "grad_norm": 0.2634804890652626,
      "learning_rate": 5.017519443046226e-05,
      "loss": 0.5062,
      "step": 181
    },
    {
      "epoch": 2.368805848903331,
      "grad_norm": 0.3884943325282365,
      "learning_rate": 4.981941948563197e-05,
      "loss": 0.4947,
      "step": 182
    },
    {
      "epoch": 2.381803411860276,
      "grad_norm": 0.30824639305661866,
      "learning_rate": 4.94628159687257e-05,
      "loss": 0.4959,
      "step": 183
    },
    {
      "epoch": 2.3948009748172217,
      "grad_norm": 0.2779177232068281,
      "learning_rate": 4.9105413970291747e-05,
      "loss": 0.5004,
      "step": 184
    },
    {
      "epoch": 2.4077985377741675,
      "grad_norm": 0.2542309798243027,
      "learning_rate": 4.874724364825504e-05,
      "loss": 0.5059,
      "step": 185
    },
    {
      "epoch": 2.420796100731113,
      "grad_norm": 0.2377609454443894,
      "learning_rate": 4.8388335225372416e-05,
      "loss": 0.5047,
      "step": 186
    },
    {
      "epoch": 2.4337936636880584,
      "grad_norm": 0.2783658709624058,
      "learning_rate": 4.802871898668237e-05,
      "loss": 0.5019,
      "step": 187
    },
    {
      "epoch": 2.4467912266450043,
      "grad_norm": 0.212526701822668,
      "learning_rate": 4.7668425276949546e-05,
      "loss": 0.5055,
      "step": 188
    },
    {
      "epoch": 2.4597887896019497,
      "grad_norm": 0.23513126552745808,
      "learning_rate": 4.730748449810429e-05,
      "loss": 0.4955,
      "step": 189
    },
    {
      "epoch": 2.472786352558895,
      "grad_norm": 0.24679411263913667,
      "learning_rate": 4.694592710667723e-05,
      "loss": 0.4954,
      "step": 190
    },
    {
      "epoch": 2.4857839155158405,
      "grad_norm": 0.24566735026281666,
      "learning_rate": 4.658378361122936e-05,
      "loss": 0.4991,
      "step": 191
    },
    {
      "epoch": 2.4987814784727864,
      "grad_norm": 0.18986094039099186,
      "learning_rate": 4.622108456977773e-05,
      "loss": 0.5029,
      "step": 192
    },
    {
      "epoch": 2.511779041429732,
      "grad_norm": 0.23326957776437438,
      "learning_rate": 4.585786058721687e-05,
      "loss": 0.4995,
      "step": 193
    },
    {
      "epoch": 2.5247766043866777,
      "grad_norm": 0.3107226298797518,
      "learning_rate": 4.549414231273633e-05,
      "loss": 0.5026,
      "step": 194
    },
    {
      "epoch": 2.537774167343623,
      "grad_norm": 0.22719154064847447,
      "learning_rate": 4.512996043723453e-05,
      "loss": 0.5016,
      "step": 195
    },
    {
      "epoch": 2.5507717303005686,
      "grad_norm": 0.1833290747151392,
      "learning_rate": 4.476534569072895e-05,
      "loss": 0.5015,
      "step": 196
    },
    {
      "epoch": 2.563769293257514,
      "grad_norm": 0.18170499366430282,
      "learning_rate": 4.440032883976318e-05,
      "loss": 0.4987,
      "step": 197
    },
    {
      "epoch": 2.57676685621446,
      "grad_norm": 0.22937959483532433,
      "learning_rate": 4.403494068481074e-05,
      "loss": 0.497,
      "step": 198
    },
    {
      "epoch": 2.5897644191714053,
      "grad_norm": 0.20103120503973224,
      "learning_rate": 4.3669212057676145e-05,
      "loss": 0.5039,
      "step": 199
    },
    {
      "epoch": 2.602761982128351,
      "grad_norm": 0.19185288266242695,
      "learning_rate": 4.33031738188933e-05,
      "loss": 0.494,
      "step": 200
    },
    {
      "epoch": 2.6157595450852966,
      "grad_norm": 0.17010477360475862,
      "learning_rate": 4.293685685512142e-05,
      "loss": 0.4983,
      "step": 201
    },
    {
      "epoch": 2.628757108042242,
      "grad_norm": 0.22826164302714383,
      "learning_rate": 4.257029207653881e-05,
      "loss": 0.4973,
      "step": 202
    },
    {
      "epoch": 2.6417546709991875,
      "grad_norm": 0.1999552754693765,
      "learning_rate": 4.220351041423462e-05,
      "loss": 0.4965,
      "step": 203
    },
    {
      "epoch": 2.6547522339561334,
      "grad_norm": 0.16313332181232204,
      "learning_rate": 4.183654281759888e-05,
      "loss": 0.5064,
      "step": 204
    },
    {
      "epoch": 2.667749796913079,
      "grad_norm": 0.2067882657675044,
      "learning_rate": 4.1469420251710905e-05,
      "loss": 0.5006,
      "step": 205
    },
    {
      "epoch": 2.680747359870024,
      "grad_norm": 0.19027529910668617,
      "learning_rate": 4.110217369472649e-05,
      "loss": 0.505,
      "step": 206
    },
    {
      "epoch": 2.69374492282697,
      "grad_norm": 0.2032049934064908,
      "learning_rate": 4.07348341352639e-05,
      "loss": 0.4931,
      "step": 207
    },
    {
      "epoch": 2.7067424857839155,
      "grad_norm": 0.16941618101686762,
      "learning_rate": 4.0367432569789065e-05,
      "loss": 0.4932,
      "step": 208
    },
    {
      "epoch": 2.719740048740861,
      "grad_norm": 0.17079677319469672,
      "learning_rate": 4e-05,
      "loss": 0.4903,
      "step": 209
    },
    {
      "epoch": 2.732737611697807,
      "grad_norm": 0.19810836153539363,
      "learning_rate": 3.963256743021095e-05,
      "loss": 0.4938,
      "step": 210
    },
    {
      "epoch": 2.7457351746547523,
      "grad_norm": 0.16838352156942718,
      "learning_rate": 3.92651658647361e-05,
      "loss": 0.4932,
      "step": 211
    },
    {
      "epoch": 2.7587327376116977,
      "grad_norm": 0.19393287722628685,
      "learning_rate": 3.889782630527353e-05,
      "loss": 0.501,
      "step": 212
    },
    {
      "epoch": 2.7717303005686436,
      "grad_norm": 0.2322999523417372,
      "learning_rate": 3.853057974828911e-05,
      "loss": 0.5028,
      "step": 213
    },
    {
      "epoch": 2.784727863525589,
      "grad_norm": 0.17963554890231898,
      "learning_rate": 3.816345718240113e-05,
      "loss": 0.4966,
      "step": 214
    },
    {
      "epoch": 2.7977254264825344,
      "grad_norm": 0.17972346659387642,
      "learning_rate": 3.779648958576538e-05,
      "loss": 0.4971,
      "step": 215
    },
    {
      "epoch": 2.81072298943948,
      "grad_norm": 0.18567150272224223,
      "learning_rate": 3.74297079234612e-05,
      "loss": 0.5,
      "step": 216
    },
    {
      "epoch": 2.8237205523964257,
      "grad_norm": 0.13962164511716743,
      "learning_rate": 3.706314314487859e-05,
      "loss": 0.5029,
      "step": 217
    },
    {
      "epoch": 2.836718115353371,
      "grad_norm": 0.15495008282651787,
      "learning_rate": 3.669682618110671e-05,
      "loss": 0.4963,
      "step": 218
    },
    {
      "epoch": 2.849715678310317,
      "grad_norm": 0.14169366024663888,
      "learning_rate": 3.6330787942323855e-05,
      "loss": 0.4975,
      "step": 219
    },
    {
      "epoch": 2.8627132412672625,
      "grad_norm": 0.17858977296408154,
      "learning_rate": 3.5965059315189274e-05,
      "loss": 0.5001,
      "step": 220
    },
    {
      "epoch": 2.875710804224208,
      "grad_norm": 0.16013806431323374,
      "learning_rate": 3.559967116023683e-05,
      "loss": 0.5015,
      "step": 221
    },
    {
      "epoch": 2.8887083671811533,
      "grad_norm": 0.17965163443416698,
      "learning_rate": 3.523465430927106e-05,
      "loss": 0.4978,
      "step": 222
    },
    {
      "epoch": 2.901705930138099,
      "grad_norm": 0.180187277461439,
      "learning_rate": 3.4870039562765475e-05,
      "loss": 0.4974,
      "step": 223
    },
    {
      "epoch": 2.9147034930950446,
      "grad_norm": 0.1838861258279284,
      "learning_rate": 3.4505857687263675e-05,
      "loss": 0.4921,
      "step": 224
    },
    {
      "epoch": 2.9277010560519905,
      "grad_norm": 0.18286519398138992,
      "learning_rate": 3.414213941278314e-05,
      "loss": 0.5012,
      "step": 225
    },
    {
      "epoch": 2.940698619008936,
      "grad_norm": 0.1574518878317098,
      "learning_rate": 3.377891543022229e-05,
      "loss": 0.4967,
      "step": 226
    },
    {
      "epoch": 2.9536961819658814,
      "grad_norm": 0.18504753095964077,
      "learning_rate": 3.341621638877064e-05,
      "loss": 0.5026,
      "step": 227
    },
    {
      "epoch": 2.966693744922827,
      "grad_norm": 0.1576894962337943,
      "learning_rate": 3.305407289332279e-05,
      "loss": 0.4944,
      "step": 228
    },
    {
      "epoch": 2.9796913078797727,
      "grad_norm": 0.1467985899864142,
      "learning_rate": 3.269251550189573e-05,
      "loss": 0.4954,
      "step": 229
    },
    {
      "epoch": 2.992688870836718,
      "grad_norm": 0.1515755081778688,
      "learning_rate": 3.2331574723050474e-05,
      "loss": 0.5042,
      "step": 230
    },
    {
      "epoch": 3.0389926888708367,
      "grad_norm": 0.2765548168210541,
      "learning_rate": 3.197128101331764e-05,
      "loss": 0.4549,
      "step": 231
    },
    {
      "epoch": 3.051990251827782,
      "grad_norm": 0.19466505214102667,
      "learning_rate": 3.161166477462759e-05,
      "loss": 0.4548,
      "step": 232
    },
    {
      "epoch": 3.064987814784728,
      "grad_norm": 0.2880161957309376,
      "learning_rate": 3.125275635174497e-05,
      "loss": 0.4582,
      "step": 233
    },
    {
      "epoch": 3.0779853777416735,
      "grad_norm": 0.23822280817514718,
      "learning_rate": 3.089458602970828e-05,
      "loss": 0.453,
      "step": 234
    },
    {
      "epoch": 3.090982940698619,
      "grad_norm": 0.25800133083185917,
      "learning_rate": 3.0537184031274306e-05,
      "loss": 0.4547,
      "step": 235
    },
    {
      "epoch": 3.1039805036555648,
      "grad_norm": 0.2384112705824712,
      "learning_rate": 3.0180580514368037e-05,
      "loss": 0.4586,
      "step": 236
    },
    {
      "epoch": 3.11697806661251,
      "grad_norm": 0.24369875451596973,
      "learning_rate": 2.9824805569537747e-05,
      "loss": 0.4551,
      "step": 237
    },
    {
      "epoch": 3.1299756295694556,
      "grad_norm": 0.24117461954872021,
      "learning_rate": 2.9469889217416045e-05,
      "loss": 0.4526,
      "step": 238
    },
    {
      "epoch": 3.1429731925264015,
      "grad_norm": 0.20788905500839922,
      "learning_rate": 2.9115861406186593e-05,
      "loss": 0.4543,
      "step": 239
    },
    {
      "epoch": 3.155970755483347,
      "grad_norm": 0.20899161474751476,
      "learning_rate": 2.8762752009057232e-05,
      "loss": 0.4523,
      "step": 240
    },
    {
      "epoch": 3.1689683184402924,
      "grad_norm": 0.21136922172757902,
      "learning_rate": 2.841059082173902e-05,
      "loss": 0.4551,
      "step": 241
    },
    {
      "epoch": 3.181965881397238,
      "grad_norm": 0.20064139424506391,
      "learning_rate": 2.805940755993223e-05,
      "loss": 0.4522,
      "step": 242
    },
    {
      "epoch": 3.1949634443541837,
      "grad_norm": 0.18215269955576766,
      "learning_rate": 2.770923185681878e-05,
      "loss": 0.4543,
      "step": 243
    },
    {
      "epoch": 3.207961007311129,
      "grad_norm": 0.20092168632579746,
      "learning_rate": 2.7360093260561904e-05,
      "loss": 0.4547,
      "step": 244
    },
    {
      "epoch": 3.2209585702680745,
      "grad_norm": 0.15760927431037322,
      "learning_rate": 2.7012021231812666e-05,
      "loss": 0.4485,
      "step": 245
    },
    {
      "epoch": 3.2339561332250204,
      "grad_norm": 0.1905855267036173,
      "learning_rate": 2.6665045141224193e-05,
      "loss": 0.4514,
      "step": 246
    },
    {
      "epoch": 3.246953696181966,
      "grad_norm": 0.15059676552502058,
      "learning_rate": 2.6319194266973256e-05,
      "loss": 0.4587,
      "step": 247
    },
    {
      "epoch": 3.2599512591389113,
      "grad_norm": 0.18536963620839997,
      "learning_rate": 2.597449779228983e-05,
      "loss": 0.4534,
      "step": 248
    },
    {
      "epoch": 3.272948822095857,
      "grad_norm": 0.17274276141163067,
      "learning_rate": 2.563098480299451e-05,
      "loss": 0.457,
      "step": 249
    },
    {
      "epoch": 3.2859463850528026,
      "grad_norm": 0.1562543243595631,
      "learning_rate": 2.5288684285044283e-05,
      "loss": 0.4549,
      "step": 250
    },
    {
      "epoch": 3.298943948009748,
      "grad_norm": 0.17358175772296236,
      "learning_rate": 2.4947625122086585e-05,
      "loss": 0.4548,
      "step": 251
    },
    {
      "epoch": 3.311941510966694,
      "grad_norm": 0.17184079642105152,
      "learning_rate": 2.460783609302218e-05,
      "loss": 0.4521,
      "step": 252
    },
    {
      "epoch": 3.3249390739236393,
      "grad_norm": 0.16030118314009942,
      "learning_rate": 2.4269345869576676e-05,
      "loss": 0.4469,
      "step": 253
    },
    {
      "epoch": 3.3379366368805847,
      "grad_norm": 0.16681227025456086,
      "learning_rate": 2.393218301388123e-05,
      "loss": 0.4532,
      "step": 254
    },
    {
      "epoch": 3.3509341998375306,
      "grad_norm": 0.13889802201519397,
      "learning_rate": 2.35963759760624e-05,
      "loss": 0.4524,
      "step": 255
    },
    {
      "epoch": 3.363931762794476,
      "grad_norm": 0.15349090525146497,
      "learning_rate": 2.3261953091841553e-05,
      "loss": 0.4546,
      "step": 256
    },
    {
      "epoch": 3.3769293257514215,
      "grad_norm": 0.13644110770797446,
      "learning_rate": 2.2928942580143855e-05,
      "loss": 0.4526,
      "step": 257
    },
    {
      "epoch": 3.3899268887083673,
      "grad_norm": 0.1490492694969517,
      "learning_rate": 2.2597372540717083e-05,
      "loss": 0.4568,
      "step": 258
    },
    {
      "epoch": 3.4029244516653128,
      "grad_norm": 0.14894023043131757,
      "learning_rate": 2.226727095176057e-05,
      "loss": 0.4588,
      "step": 259
    },
    {
      "epoch": 3.415922014622258,
      "grad_norm": 0.14995590721137517,
      "learning_rate": 2.1938665667564435e-05,
      "loss": 0.4507,
      "step": 260
    },
    {
      "epoch": 3.428919577579204,
      "grad_norm": 0.14184790166599112,
      "learning_rate": 2.1611584416159106e-05,
      "loss": 0.4496,
      "step": 261
    },
    {
      "epoch": 3.4419171405361495,
      "grad_norm": 0.13481813074778676,
      "learning_rate": 2.1286054796975696e-05,
      "loss": 0.4541,
      "step": 262
    },
    {
      "epoch": 3.454914703493095,
      "grad_norm": 0.12999426318312995,
      "learning_rate": 2.096210427851706e-05,
      "loss": 0.4544,
      "step": 263
    },
    {
      "epoch": 3.467912266450041,
      "grad_norm": 0.13239806186780637,
      "learning_rate": 2.063976019604006e-05,
      "loss": 0.451,
      "step": 264
    },
    {
      "epoch": 3.4809098294069862,
      "grad_norm": 0.1271255029835273,
      "learning_rate": 2.0319049749248876e-05,
      "loss": 0.4487,
      "step": 265
    },
    {
      "epoch": 3.4939073923639317,
      "grad_norm": 0.13303879175543112,
      "learning_rate": 2.0000000000000012e-05,
      "loss": 0.4522,
      "step": 266
    },
    {
      "epoch": 3.506904955320877,
      "grad_norm": 0.12772791682528986,
      "learning_rate": 1.9682637870018638e-05,
      "loss": 0.4486,
      "step": 267
    },
    {
      "epoch": 3.519902518277823,
      "grad_norm": 0.13368037803272537,
      "learning_rate": 1.9366990138627054e-05,
      "loss": 0.4556,
      "step": 268
    },
    {
      "epoch": 3.5329000812347684,
      "grad_norm": 0.12998443969153295,
      "learning_rate": 1.9053083440484887e-05,
      "loss": 0.4573,
      "step": 269
    },
    {
      "epoch": 3.5458976441917143,
      "grad_norm": 0.1299143292827015,
      "learning_rate": 1.8740944263341773e-05,
      "loss": 0.4496,
      "step": 270
    },
    {
      "epoch": 3.5588952071486597,
      "grad_norm": 0.13501723708928842,
      "learning_rate": 1.8430598945802156e-05,
      "loss": 0.4529,
      "step": 271
    },
    {
      "epoch": 3.571892770105605,
      "grad_norm": 0.1392141581263843,
      "learning_rate": 1.8122073675102935e-05,
      "loss": 0.4606,
      "step": 272
    },
    {
      "epoch": 3.5848903330625506,
      "grad_norm": 0.13279583183929072,
      "learning_rate": 1.781539448490365e-05,
      "loss": 0.4559,
      "step": 273
    },
    {
      "epoch": 3.5978878960194964,
      "grad_norm": 0.13802766481193707,
      "learning_rate": 1.7510587253089842e-05,
      "loss": 0.4525,
      "step": 274
    },
    {
      "epoch": 3.610885458976442,
      "grad_norm": 0.1335727117344679,
      "learning_rate": 1.7207677699589355e-05,
      "loss": 0.4529,
      "step": 275
    },
    {
      "epoch": 3.6238830219333877,
      "grad_norm": 0.12489253393946519,
      "learning_rate": 1.690669138420215e-05,
      "loss": 0.4582,
      "step": 276
    },
    {
      "epoch": 3.636880584890333,
      "grad_norm": 0.1413569401198253,
      "learning_rate": 1.6607653704443457e-05,
      "loss": 0.4559,
      "step": 277
    },
    {
      "epoch": 3.6498781478472786,
      "grad_norm": 0.11828655169190146,
      "learning_rate": 1.6310589893400804e-05,
      "loss": 0.4518,
      "step": 278
    },
    {
      "epoch": 3.662875710804224,
      "grad_norm": 0.12039871469350676,
      "learning_rate": 1.601552501760473e-05,
      "loss": 0.4513,
      "step": 279
    },
    {
      "epoch": 3.67587327376117,
      "grad_norm": 0.10972016559298864,
      "learning_rate": 1.5722483974913737e-05,
      "loss": 0.4585,
      "step": 280
    },
    {
      "epoch": 3.6888708367181153,
      "grad_norm": 0.11605815211359002,
      "learning_rate": 1.5431491492413288e-05,
      "loss": 0.4548,
      "step": 281
    },
    {
      "epoch": 3.7018683996750608,
      "grad_norm": 0.11113738686213914,
      "learning_rate": 1.5142572124329418e-05,
      "loss": 0.456,
      "step": 282
    },
    {
      "epoch": 3.7148659626320066,
      "grad_norm": 0.11801948433211017,
      "learning_rate": 1.4855750249956718e-05,
      "loss": 0.4567,
      "step": 283
    },
    {
      "epoch": 3.727863525588952,
      "grad_norm": 0.11508267709388358,
      "learning_rate": 1.457105007160129e-05,
      "loss": 0.4567,
      "step": 284
    },
    {
      "epoch": 3.7408610885458975,
      "grad_norm": 0.11951109036290335,
      "learning_rate": 1.4288495612538427e-05,
      "loss": 0.4543,
      "step": 285
    },
    {
      "epoch": 3.7538586515028434,
      "grad_norm": 0.11609444931011924,
      "learning_rate": 1.4008110714985623e-05,
      "loss": 0.457,
      "step": 286
    },
    {
      "epoch": 3.766856214459789,
      "grad_norm": 0.12161842365979787,
      "learning_rate": 1.3729919038090627e-05,
      "loss": 0.4574,
      "step": 287
    },
    {
      "epoch": 3.7798537774167342,
      "grad_norm": 0.12517775168843562,
      "learning_rate": 1.3453944055935151e-05,
      "loss": 0.4547,
      "step": 288
    },
    {
      "epoch": 3.79285134037368,
      "grad_norm": 0.11520911340042557,
      "learning_rate": 1.3180209055554043e-05,
      "loss": 0.4513,
      "step": 289
    },
    {
      "epoch": 3.8058489033306255,
      "grad_norm": 0.11815010738675953,
      "learning_rate": 1.2908737134970367e-05,
      "loss": 0.443,
      "step": 290
    },
    {
      "epoch": 3.818846466287571,
      "grad_norm": 0.1162507897952791,
      "learning_rate": 1.2639551201246278e-05,
      "loss": 0.4516,
      "step": 291
    },
    {
      "epoch": 3.8318440292445164,
      "grad_norm": 0.11366641746047221,
      "learning_rate": 1.2372673968550229e-05,
      "loss": 0.4547,
      "step": 292
    },
    {
      "epoch": 3.8448415922014623,
      "grad_norm": 0.13504962259021788,
      "learning_rate": 1.2108127956240186e-05,
      "loss": 0.4455,
      "step": 293
    },
    {
      "epoch": 3.8578391551584077,
      "grad_norm": 0.11026952373517614,
      "learning_rate": 1.1845935486963546e-05,
      "loss": 0.4525,
      "step": 294
    },
    {
      "epoch": 3.8708367181153536,
      "grad_norm": 0.10668225682868658,
      "learning_rate": 1.158611868477344e-05,
      "loss": 0.4566,
      "step": 295
    },
    {
      "epoch": 3.883834281072299,
      "grad_norm": 0.1354540165594788,
      "learning_rate": 1.1328699473261957e-05,
      "loss": 0.4472,
      "step": 296
    },
    {
      "epoch": 3.8968318440292444,
      "grad_norm": 0.10921111595164792,
      "learning_rate": 1.107369957371013e-05,
      "loss": 0.4473,
      "step": 297
    },
    {
      "epoch": 3.90982940698619,
      "grad_norm": 0.10437459191229037,
      "learning_rate": 1.0821140503255174e-05,
      "loss": 0.4539,
      "step": 298
    },
    {
      "epoch": 3.9228269699431357,
      "grad_norm": 0.11124834426391973,
      "learning_rate": 1.0571043573074737e-05,
      "loss": 0.4524,
      "step": 299
    },
    {
      "epoch": 3.935824532900081,
      "grad_norm": 0.11103073467741553,
      "learning_rate": 1.0323429886588743e-05,
      "loss": 0.4551,
      "step": 300
    },
|
    {
      "epoch": 3.948822095857027,
      "grad_norm": 0.0996775517579157,
      "learning_rate": 1.0078320337678584e-05,
      "loss": 0.4542,
      "step": 301
    },
    {
      "epoch": 3.9618196588139725,
      "grad_norm": 0.10553906139080531,
      "learning_rate": 9.835735608924155e-06,
      "loss": 0.4559,
      "step": 302
    },
    {
      "epoch": 3.974817221770918,
      "grad_norm": 0.11276985701526075,
      "learning_rate": 9.595696169858542e-06,
      "loss": 0.4556,
      "step": 303
    },
    {
      "epoch": 3.9878147847278633,
      "grad_norm": 0.10020067885290669,
      "learning_rate": 9.358222275240884e-06,
      "loss": 0.452,
      "step": 304
    },
    {
      "epoch": 4.000812347684809,
      "grad_norm": 0.1015888198294778,
      "learning_rate": 9.123333963347166e-06,
      "loss": 0.4523,
      "step": 305
    },
    {
      "epoch": 4.013809910641755,
      "grad_norm": 0.19346874584833063,
      "learning_rate": 8.89105105427945e-06,
      "loss": 0.4304,
      "step": 306
    },
    {
      "epoch": 4.0268074735987005,
      "grad_norm": 0.1492042453109882,
      "learning_rate": 8.661393148293355e-06,
      "loss": 0.4184,
      "step": 307
    },
    {
      "epoch": 4.039805036555646,
      "grad_norm": 0.11128451836895309,
      "learning_rate": 8.434379624144261e-06,
      "loss": 0.4272,
      "step": 308
    },
    {
      "epoch": 4.052802599512591,
      "grad_norm": 0.15566082975695297,
      "learning_rate": 8.210029637452016e-06,
      "loss": 0.4291,
      "step": 309
    },
    {
      "epoch": 4.065800162469537,
      "grad_norm": 0.18015489370960358,
      "learning_rate": 7.988362119084642e-06,
      "loss": 0.43,
      "step": 310
    },
    {
      "epoch": 4.078797725426482,
      "grad_norm": 0.16617077404145308,
      "learning_rate": 7.769395773560874e-06,
      "loss": 0.4245,
      "step": 311
    },
    {
      "epoch": 4.0917952883834285,
      "grad_norm": 0.12915804996415034,
      "learning_rate": 7.553149077471915e-06,
      "loss": 0.4205,
      "step": 312
    },
    {
      "epoch": 4.104792851340374,
      "grad_norm": 0.12694294053103128,
      "learning_rate": 7.3396402779222845e-06,
      "loss": 0.4281,
      "step": 313
    },
    {
      "epoch": 4.117790414297319,
      "grad_norm": 0.14590711745883797,
      "learning_rate": 7.128887390990198e-06,
      "loss": 0.4361,
      "step": 314
    },
    {
      "epoch": 4.130787977254265,
      "grad_norm": 0.13808851249499354,
      "learning_rate": 6.9209082002072725e-06,
      "loss": 0.4215,
      "step": 315
    },
    {
      "epoch": 4.14378554021121,
      "grad_norm": 0.129695250623056,
      "learning_rate": 6.715720255058e-06,
      "loss": 0.4252,
      "step": 316
    },
    {
      "epoch": 4.156783103168156,
      "grad_norm": 0.11617214795717144,
      "learning_rate": 6.513340869498859e-06,
      "loss": 0.4251,
      "step": 317
    },
    {
      "epoch": 4.169780666125101,
      "grad_norm": 0.12511546150804922,
      "learning_rate": 6.313787120497376e-06,
      "loss": 0.4223,
      "step": 318
    },
    {
      "epoch": 4.182778229082047,
      "grad_norm": 0.11917710079150087,
      "learning_rate": 6.117075846591123e-06,
      "loss": 0.4259,
      "step": 319
    },
    {
      "epoch": 4.195775792038993,
      "grad_norm": 0.11982991360751519,
      "learning_rate": 5.923223646466923e-06,
      "loss": 0.4267,
      "step": 320
    },
    {
      "epoch": 4.208773354995938,
      "grad_norm": 0.11087809076543045,
      "learning_rate": 5.732246877560146e-06,
      "loss": 0.4237,
      "step": 321
    },
    {
      "epoch": 4.221770917952884,
      "grad_norm": 0.11037591637224678,
      "learning_rate": 5.5441616546745646e-06,
      "loss": 0.4253,
      "step": 322
    },
    {
      "epoch": 4.234768480909829,
      "grad_norm": 0.11290723637234998,
      "learning_rate": 5.358983848622452e-06,
      "loss": 0.4272,
      "step": 323
    },
    {
      "epoch": 4.247766043866775,
      "grad_norm": 0.11769455319020229,
      "learning_rate": 5.176729084885508e-06,
      "loss": 0.4234,
      "step": 324
    },
    {
      "epoch": 4.260763606823721,
      "grad_norm": 0.11385474199844745,
      "learning_rate": 4.99741274229625e-06,
      "loss": 0.4323,
      "step": 325
    },
    {
      "epoch": 4.273761169780666,
      "grad_norm": 0.10749198484614475,
      "learning_rate": 4.821049951740442e-06,
      "loss": 0.4295,
      "step": 326
    },
    {
      "epoch": 4.286758732737612,
      "grad_norm": 0.10566297009015177,
      "learning_rate": 4.647655594880225e-06,
      "loss": 0.4254,
      "step": 327
    },
    {
      "epoch": 4.299756295694557,
      "grad_norm": 0.10419068782938447,
      "learning_rate": 4.4772443028985004e-06,
      "loss": 0.4252,
      "step": 328
    },
    {
      "epoch": 4.312753858651503,
      "grad_norm": 0.10950077630402558,
      "learning_rate": 4.3098304552642385e-06,
      "loss": 0.4242,
      "step": 329
    },
    {
      "epoch": 4.325751421608448,
      "grad_norm": 0.1100371440360915,
      "learning_rate": 4.1454281785191995e-06,
      "loss": 0.4197,
      "step": 330
    },
    {
      "epoch": 4.338748984565394,
      "grad_norm": 0.10391792774677544,
      "learning_rate": 3.984051345085855e-06,
      "loss": 0.4258,
      "step": 331
    },
    {
      "epoch": 4.35174654752234,
      "grad_norm": 0.09904939283103152,
      "learning_rate": 3.825713572096903e-06,
      "loss": 0.4184,
      "step": 332
    },
    {
      "epoch": 4.364744110479285,
      "grad_norm": 0.10192004321049586,
      "learning_rate": 3.6704282202461515e-06,
      "loss": 0.4281,
      "step": 333
    },
    {
      "epoch": 4.377741673436231,
      "grad_norm": 0.09890117596157007,
      "learning_rate": 3.518208392661184e-06,
      "loss": 0.4197,
      "step": 334
    },
    {
      "epoch": 4.390739236393176,
      "grad_norm": 0.09792268745311886,
      "learning_rate": 3.3690669337977e-06,
      "loss": 0.4265,
      "step": 335
    },
    {
      "epoch": 4.4037367993501215,
      "grad_norm": 0.09563298489182019,
      "learning_rate": 3.2230164283556918e-06,
      "loss": 0.4251,
      "step": 336
    },
    {
      "epoch": 4.416734362307068,
      "grad_norm": 0.09937136144673961,
      "learning_rate": 3.080069200217497e-06,
      "loss": 0.4345,
      "step": 337
    },
    {
      "epoch": 4.429731925264013,
      "grad_norm": 0.09928349883535201,
      "learning_rate": 2.9402373114079295e-06,
      "loss": 0.4285,
      "step": 338
    },
    {
      "epoch": 4.442729488220959,
      "grad_norm": 0.09247546072284324,
      "learning_rate": 2.803532561076492e-06,
      "loss": 0.4273,
      "step": 339
    },
    {
      "epoch": 4.455727051177904,
      "grad_norm": 0.09749820385703685,
      "learning_rate": 2.669966484501716e-06,
      "loss": 0.4269,
      "step": 340
    },
    {
      "epoch": 4.46872461413485,
      "grad_norm": 0.09646403701008398,
      "learning_rate": 2.5395503521178143e-06,
      "loss": 0.4308,
      "step": 341
    },
    {
      "epoch": 4.481722177091795,
      "grad_norm": 0.0936333573004958,
      "learning_rate": 2.4122951685636674e-06,
      "loss": 0.4224,
      "step": 342
    },
    {
      "epoch": 4.49471974004874,
      "grad_norm": 0.09222486151634639,
      "learning_rate": 2.2882116717542634e-06,
      "loss": 0.4228,
      "step": 343
    },
    {
      "epoch": 4.507717303005687,
      "grad_norm": 0.0937700330761675,
      "learning_rate": 2.1673103319746146e-06,
      "loss": 0.4251,
      "step": 344
    },
    {
      "epoch": 4.520714865962632,
      "grad_norm": 0.09523262003402952,
      "learning_rate": 2.049601350996233e-06,
      "loss": 0.425,
      "step": 345
    },
    {
      "epoch": 4.533712428919578,
      "grad_norm": 0.09580115361604435,
      "learning_rate": 1.93509466121633e-06,
      "loss": 0.4278,
      "step": 346
    },
    {
      "epoch": 4.546709991876523,
      "grad_norm": 0.09551827606861985,
      "learning_rate": 1.8237999248197002e-06,
      "loss": 0.4277,
      "step": 347
    },
    {
      "epoch": 4.5597075548334685,
      "grad_norm": 0.09398206095795404,
      "learning_rate": 1.7157265329634354e-06,
      "loss": 0.4296,
      "step": 348
    },
    {
      "epoch": 4.572705117790415,
      "grad_norm": 0.09186446471518088,
      "learning_rate": 1.6108836049844434e-06,
      "loss": 0.4254,
      "step": 349
    },
    {
      "epoch": 4.58570268074736,
      "grad_norm": 0.09291207003002876,
      "learning_rate": 1.5092799876299835e-06,
      "loss": 0.4228,
      "step": 350
    },
    {
      "epoch": 4.598700243704306,
      "grad_norm": 0.09321080065504848,
      "learning_rate": 1.4109242543111834e-06,
      "loss": 0.4224,
      "step": 351
    },
    {
      "epoch": 4.611697806661251,
      "grad_norm": 0.09820369207399382,
      "learning_rate": 1.3158247043795735e-06,
      "loss": 0.425,
      "step": 352
    },
    {
      "epoch": 4.6246953696181965,
      "grad_norm": 0.09483780290747863,
      "learning_rate": 1.2239893624267852e-06,
      "loss": 0.4248,
      "step": 353
    },
    {
      "epoch": 4.637692932575142,
      "grad_norm": 0.09731358909691826,
      "learning_rate": 1.1354259776074472e-06,
      "loss": 0.4235,
      "step": 354
    },
    {
      "epoch": 4.650690495532087,
      "grad_norm": 0.0904965273633941,
      "learning_rate": 1.050142022985292e-06,
      "loss": 0.4181,
      "step": 355
    },
    {
      "epoch": 4.663688058489034,
      "grad_norm": 0.08967404436562013,
      "learning_rate": 9.681446949025752e-07,
      "loss": 0.4294,
      "step": 356
    },
    {
      "epoch": 4.676685621445979,
      "grad_norm": 0.09202959723546777,
      "learning_rate": 8.89440912372832e-07,
      "loss": 0.4223,
      "step": 357
    },
    {
      "epoch": 4.6896831844029245,
      "grad_norm": 0.0934294923741134,
      "learning_rate": 8.140373164970428e-07,
      "loss": 0.4254,
      "step": 358
    },
    {
      "epoch": 4.70268074735987,
      "grad_norm": 0.09277475785552658,
      "learning_rate": 7.419402699032852e-07,
      "loss": 0.4264,
      "step": 359
    },
    {
      "epoch": 4.715678310316815,
      "grad_norm": 0.0963901158463573,
      "learning_rate": 6.731558562097995e-07,
      "loss": 0.4244,
      "step": 360
    },
    {
      "epoch": 4.728675873273761,
      "grad_norm": 0.08814663638316801,
      "learning_rate": 6.076898795116792e-07,
      "loss": 0.4191,
      "step": 361
    },
    {
      "epoch": 4.741673436230707,
      "grad_norm": 0.08994523311851658,
      "learning_rate": 5.455478638911071e-07,
      "loss": 0.4287,
      "step": 362
    },
    {
      "epoch": 4.754670999187653,
      "grad_norm": 0.08992860659969752,
      "learning_rate": 4.867350529512261e-07,
      "loss": 0.4243,
      "step": 363
    },
    {
      "epoch": 4.767668562144598,
      "grad_norm": 0.08903822534894691,
      "learning_rate": 4.3125640937368373e-07,
      "loss": 0.4254,
      "step": 364
    },
    {
      "epoch": 4.780666125101543,
      "grad_norm": 0.09037872867259723,
      "learning_rate": 3.791166144998704e-07,
      "loss": 0.4237,
      "step": 365
    },
    {
      "epoch": 4.793663688058489,
      "grad_norm": 0.08849981605290622,
      "learning_rate": 3.3032006793590977e-07,
      "loss": 0.4256,
      "step": 366
    },
    {
      "epoch": 4.806661251015434,
      "grad_norm": 0.08939190625410524,
      "learning_rate": 2.848708871814054e-07,
      "loss": 0.432,
      "step": 367
    },
    {
      "epoch": 4.81965881397238,
      "grad_norm": 0.08978512061239115,
      "learning_rate": 2.4277290728202063e-07,
      "loss": 0.431,
      "step": 368
    },
    {
      "epoch": 4.832656376929326,
      "grad_norm": 0.08749342151530978,
      "learning_rate": 2.040296805058528e-07,
      "loss": 0.4282,
      "step": 369
    },
    {
      "epoch": 4.8456539398862715,
      "grad_norm": 0.09394237513340908,
      "learning_rate": 1.6864447604370004e-07,
      "loss": 0.4175,
      "step": 370
    },
    {
      "epoch": 4.858651502843217,
      "grad_norm": 0.0881034945018527,
      "learning_rate": 1.3662027973320614e-07,
      "loss": 0.4225,
      "step": 371
    },
    {
      "epoch": 4.871649065800162,
      "grad_norm": 0.08852561609870382,
      "learning_rate": 1.0795979380690657e-07,
      "loss": 0.4227,
      "step": 372
    },
    {
      "epoch": 4.884646628757108,
      "grad_norm": 0.0911620154787333,
      "learning_rate": 8.266543666421544e-08,
      "loss": 0.429,
      "step": 373
    },
    {
      "epoch": 4.897644191714054,
      "grad_norm": 0.08685734979527214,
      "learning_rate": 6.073934266735303e-08,
      "loss": 0.4251,
      "step": 374
    },
    {
      "epoch": 4.9106417546709995,
      "grad_norm": 0.08736267011789721,
      "learning_rate": 4.218336196125439e-08,
      "loss": 0.4222,
      "step": 375
    },
    {
      "epoch": 4.923639317627945,
      "grad_norm": 0.0888955083475754,
      "learning_rate": 2.699906031745414e-08,
      "loss": 0.4263,
      "step": 376
    },
    {
      "epoch": 4.93663688058489,
      "grad_norm": 0.08888998189271619,
      "learning_rate": 1.5187719001943378e-08,
      "loss": 0.429,
      "step": 377
    },
    {
      "epoch": 4.949634443541836,
      "grad_norm": 0.08806089423257202,
      "learning_rate": 6.750334667091629e-09,
      "loss": 0.4241,
      "step": 378
    },
    {
      "epoch": 4.962632006498781,
      "grad_norm": 0.0864294126107438,
      "learning_rate": 1.6876192675052695e-09,
      "loss": 0.4205,
      "step": 379
    },
    {
      "epoch": 4.975629569455727,
      "grad_norm": 0.0892424352366353,
      "learning_rate": 0.0,
      "loss": 0.4228,
      "step": 380
    },
    {
      "epoch": 4.975629569455727,
      "step": 380,
      "total_flos": 9.790538054727893e+18,
      "train_loss": 0.17350224256515503,
      "train_runtime": 16013.9297,
      "train_samples_per_second": 12.293,
      "train_steps_per_second": 0.024
    }
  ],
  "logging_steps": 1,
  "max_steps": 380,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 9.790538054727893e+18,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}