|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 4.982278481012658, |
|
"eval_steps": 500, |
|
"global_step": 1230, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.004050632911392405, |
|
"grad_norm": 9.933298721627814, |
|
"learning_rate": 3.2520325203252037e-07, |
|
"loss": 1.8178, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.00810126582278481, |
|
"grad_norm": 9.897819497107257, |
|
"learning_rate": 6.504065040650407e-07, |
|
"loss": 1.8435, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.012151898734177215, |
|
"grad_norm": 9.959729308466024, |
|
"learning_rate": 9.75609756097561e-07, |
|
"loss": 1.7978, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.01620253164556962, |
|
"grad_norm": 9.841084825490139, |
|
"learning_rate": 1.3008130081300815e-06, |
|
"loss": 1.7812, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.020253164556962026, |
|
"grad_norm": 9.224569384269643, |
|
"learning_rate": 1.6260162601626018e-06, |
|
"loss": 1.8007, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.02430379746835443, |
|
"grad_norm": 9.201906017819061, |
|
"learning_rate": 1.951219512195122e-06, |
|
"loss": 1.8101, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.028354430379746835, |
|
"grad_norm": 7.261037855311741, |
|
"learning_rate": 2.2764227642276426e-06, |
|
"loss": 1.7217, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.03240506329113924, |
|
"grad_norm": 6.9268303591835485, |
|
"learning_rate": 2.601626016260163e-06, |
|
"loss": 1.7235, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.03645569620253165, |
|
"grad_norm": 4.1786676019541185, |
|
"learning_rate": 2.926829268292683e-06, |
|
"loss": 1.6538, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.04050632911392405, |
|
"grad_norm": 3.725397871410673, |
|
"learning_rate": 3.2520325203252037e-06, |
|
"loss": 1.6585, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.044556962025316456, |
|
"grad_norm": 3.6144087219821617, |
|
"learning_rate": 3.577235772357724e-06, |
|
"loss": 1.6638, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.04860759493670886, |
|
"grad_norm": 6.727856164507345, |
|
"learning_rate": 3.902439024390244e-06, |
|
"loss": 1.6482, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.052658227848101265, |
|
"grad_norm": 6.709680231141028, |
|
"learning_rate": 4.227642276422765e-06, |
|
"loss": 1.6345, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.05670886075949367, |
|
"grad_norm": 6.819781922830209, |
|
"learning_rate": 4.552845528455285e-06, |
|
"loss": 1.6107, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.060759493670886074, |
|
"grad_norm": 6.358479763558506, |
|
"learning_rate": 4.8780487804878055e-06, |
|
"loss": 1.6071, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.06481012658227848, |
|
"grad_norm": 5.129696946803027, |
|
"learning_rate": 5.203252032520326e-06, |
|
"loss": 1.5502, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.06886075949367089, |
|
"grad_norm": 4.88237126921362, |
|
"learning_rate": 5.528455284552846e-06, |
|
"loss": 1.567, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.0729113924050633, |
|
"grad_norm": 3.831215973439533, |
|
"learning_rate": 5.853658536585366e-06, |
|
"loss": 1.5153, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.0769620253164557, |
|
"grad_norm": 2.9896153971872512, |
|
"learning_rate": 6.178861788617887e-06, |
|
"loss": 1.5199, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.0810126582278481, |
|
"grad_norm": 2.2856836758173706, |
|
"learning_rate": 6.504065040650407e-06, |
|
"loss": 1.4982, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.08506329113924051, |
|
"grad_norm": 2.3458750821434498, |
|
"learning_rate": 6.829268292682928e-06, |
|
"loss": 1.4826, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.08911392405063291, |
|
"grad_norm": 2.4390259172121223, |
|
"learning_rate": 7.154471544715448e-06, |
|
"loss": 1.452, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.09316455696202532, |
|
"grad_norm": 2.263225931909868, |
|
"learning_rate": 7.4796747967479676e-06, |
|
"loss": 1.436, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.09721518987341772, |
|
"grad_norm": 2.0592425231154814, |
|
"learning_rate": 7.804878048780489e-06, |
|
"loss": 1.42, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.10126582278481013, |
|
"grad_norm": 1.612433437091964, |
|
"learning_rate": 8.130081300813009e-06, |
|
"loss": 1.4258, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.10531645569620253, |
|
"grad_norm": 1.4538904013821738, |
|
"learning_rate": 8.45528455284553e-06, |
|
"loss": 1.4295, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.10936708860759493, |
|
"grad_norm": 1.7208305683466307, |
|
"learning_rate": 8.78048780487805e-06, |
|
"loss": 1.4155, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.11341772151898734, |
|
"grad_norm": 1.546216554836619, |
|
"learning_rate": 9.10569105691057e-06, |
|
"loss": 1.392, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.11746835443037974, |
|
"grad_norm": 1.172588758346927, |
|
"learning_rate": 9.43089430894309e-06, |
|
"loss": 1.3881, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.12151898734177215, |
|
"grad_norm": 1.5110114910255077, |
|
"learning_rate": 9.756097560975611e-06, |
|
"loss": 1.3917, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.12556962025316457, |
|
"grad_norm": 1.2674040313167458, |
|
"learning_rate": 1.008130081300813e-05, |
|
"loss": 1.372, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.12962025316455697, |
|
"grad_norm": 1.210674013224536, |
|
"learning_rate": 1.0406504065040652e-05, |
|
"loss": 1.3559, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.13367088607594937, |
|
"grad_norm": 1.2178120633526373, |
|
"learning_rate": 1.0731707317073172e-05, |
|
"loss": 1.3626, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.13772151898734178, |
|
"grad_norm": 1.0797338608965623, |
|
"learning_rate": 1.1056910569105692e-05, |
|
"loss": 1.3555, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.14177215189873418, |
|
"grad_norm": 1.1261892745516537, |
|
"learning_rate": 1.1382113821138213e-05, |
|
"loss": 1.3515, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.1458227848101266, |
|
"grad_norm": 1.0158869748246238, |
|
"learning_rate": 1.1707317073170731e-05, |
|
"loss": 1.3441, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.149873417721519, |
|
"grad_norm": 0.9613705931197408, |
|
"learning_rate": 1.2032520325203254e-05, |
|
"loss": 1.3358, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.1539240506329114, |
|
"grad_norm": 1.0289433733318691, |
|
"learning_rate": 1.2357723577235774e-05, |
|
"loss": 1.3351, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.1579746835443038, |
|
"grad_norm": 0.9941746051672029, |
|
"learning_rate": 1.2682926829268294e-05, |
|
"loss": 1.3198, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.1620253164556962, |
|
"grad_norm": 0.9520419096796571, |
|
"learning_rate": 1.3008130081300815e-05, |
|
"loss": 1.3204, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.1660759493670886, |
|
"grad_norm": 1.237549022985381, |
|
"learning_rate": 1.3333333333333333e-05, |
|
"loss": 1.3302, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.17012658227848101, |
|
"grad_norm": 0.9305621317614939, |
|
"learning_rate": 1.3658536585365855e-05, |
|
"loss": 1.3284, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.17417721518987342, |
|
"grad_norm": 0.9080151523547578, |
|
"learning_rate": 1.3983739837398376e-05, |
|
"loss": 1.3291, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.17822784810126582, |
|
"grad_norm": 0.9626446313031342, |
|
"learning_rate": 1.4308943089430896e-05, |
|
"loss": 1.3398, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.18227848101265823, |
|
"grad_norm": 1.0935273041231537, |
|
"learning_rate": 1.4634146341463415e-05, |
|
"loss": 1.3405, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.18632911392405063, |
|
"grad_norm": 0.9595553373527506, |
|
"learning_rate": 1.4959349593495935e-05, |
|
"loss": 1.3209, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.19037974683544304, |
|
"grad_norm": 1.4349868663398164, |
|
"learning_rate": 1.528455284552846e-05, |
|
"loss": 1.3307, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.19443037974683544, |
|
"grad_norm": 1.1727813243603573, |
|
"learning_rate": 1.5609756097560978e-05, |
|
"loss": 1.326, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.19848101265822785, |
|
"grad_norm": 1.2924896058583515, |
|
"learning_rate": 1.5934959349593496e-05, |
|
"loss": 1.3221, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.20253164556962025, |
|
"grad_norm": 1.6075317808400273, |
|
"learning_rate": 1.6260162601626018e-05, |
|
"loss": 1.3228, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.20658227848101265, |
|
"grad_norm": 1.0160806494151875, |
|
"learning_rate": 1.6585365853658537e-05, |
|
"loss": 1.3249, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.21063291139240506, |
|
"grad_norm": 1.3038622421600723, |
|
"learning_rate": 1.691056910569106e-05, |
|
"loss": 1.284, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.21468354430379746, |
|
"grad_norm": 1.059360155414228, |
|
"learning_rate": 1.7235772357723578e-05, |
|
"loss": 1.308, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.21873417721518987, |
|
"grad_norm": 0.8729632808800111, |
|
"learning_rate": 1.75609756097561e-05, |
|
"loss": 1.288, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.22278481012658227, |
|
"grad_norm": 1.1910013804161288, |
|
"learning_rate": 1.788617886178862e-05, |
|
"loss": 1.3086, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.22683544303797468, |
|
"grad_norm": 0.9170151877278664, |
|
"learning_rate": 1.821138211382114e-05, |
|
"loss": 1.2988, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.23088607594936708, |
|
"grad_norm": 1.102495241219338, |
|
"learning_rate": 1.8536585365853663e-05, |
|
"loss": 1.2864, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.23493670886075949, |
|
"grad_norm": 1.1787584441957, |
|
"learning_rate": 1.886178861788618e-05, |
|
"loss": 1.3032, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.2389873417721519, |
|
"grad_norm": 1.1504936938152774, |
|
"learning_rate": 1.91869918699187e-05, |
|
"loss": 1.2869, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.2430379746835443, |
|
"grad_norm": 1.3136328739498286, |
|
"learning_rate": 1.9512195121951222e-05, |
|
"loss": 1.2869, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.2470886075949367, |
|
"grad_norm": 1.162137996087394, |
|
"learning_rate": 1.983739837398374e-05, |
|
"loss": 1.3099, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.25113924050632913, |
|
"grad_norm": 1.0187637612897926, |
|
"learning_rate": 2.016260162601626e-05, |
|
"loss": 1.3104, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.25518987341772154, |
|
"grad_norm": 1.1773984723061925, |
|
"learning_rate": 2.048780487804878e-05, |
|
"loss": 1.2662, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.25924050632911394, |
|
"grad_norm": 1.2100642936417008, |
|
"learning_rate": 2.0813008130081303e-05, |
|
"loss": 1.2942, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.26329113924050634, |
|
"grad_norm": 1.26480500230016, |
|
"learning_rate": 2.1138211382113822e-05, |
|
"loss": 1.289, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.26734177215189875, |
|
"grad_norm": 1.1992282536437207, |
|
"learning_rate": 2.1463414634146344e-05, |
|
"loss": 1.298, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.27139240506329115, |
|
"grad_norm": 1.1777622297620634, |
|
"learning_rate": 2.1788617886178863e-05, |
|
"loss": 1.2952, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.27544303797468356, |
|
"grad_norm": 1.4576673753007698, |
|
"learning_rate": 2.2113821138211385e-05, |
|
"loss": 1.2892, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.27949367088607596, |
|
"grad_norm": 1.21752818913171, |
|
"learning_rate": 2.2439024390243907e-05, |
|
"loss": 1.2798, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.28354430379746837, |
|
"grad_norm": 1.0563711356552656, |
|
"learning_rate": 2.2764227642276426e-05, |
|
"loss": 1.2976, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.28759493670886077, |
|
"grad_norm": 1.2519312317261713, |
|
"learning_rate": 2.3089430894308948e-05, |
|
"loss": 1.2802, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.2916455696202532, |
|
"grad_norm": 1.1674995101915469, |
|
"learning_rate": 2.3414634146341463e-05, |
|
"loss": 1.2796, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.2956962025316456, |
|
"grad_norm": 1.186326379256793, |
|
"learning_rate": 2.3739837398373985e-05, |
|
"loss": 1.3004, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.299746835443038, |
|
"grad_norm": 1.3708127779316692, |
|
"learning_rate": 2.4065040650406507e-05, |
|
"loss": 1.2795, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.3037974683544304, |
|
"grad_norm": 1.3114551879278658, |
|
"learning_rate": 2.4390243902439026e-05, |
|
"loss": 1.2733, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.3078481012658228, |
|
"grad_norm": 1.3416993718752204, |
|
"learning_rate": 2.4715447154471548e-05, |
|
"loss": 1.2991, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.3118987341772152, |
|
"grad_norm": 1.4054919906309085, |
|
"learning_rate": 2.5040650406504066e-05, |
|
"loss": 1.2723, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.3159493670886076, |
|
"grad_norm": 1.2089496609738961, |
|
"learning_rate": 2.536585365853659e-05, |
|
"loss": 1.2644, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.2753877462881964, |
|
"learning_rate": 2.569105691056911e-05, |
|
"loss": 1.287, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.3240506329113924, |
|
"grad_norm": 1.354655118278104, |
|
"learning_rate": 2.601626016260163e-05, |
|
"loss": 1.2776, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.3281012658227848, |
|
"grad_norm": 1.4407877823509578, |
|
"learning_rate": 2.634146341463415e-05, |
|
"loss": 1.3079, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.3321518987341772, |
|
"grad_norm": 1.2838233171245736, |
|
"learning_rate": 2.6666666666666667e-05, |
|
"loss": 1.305, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.3362025316455696, |
|
"grad_norm": 1.2470806178705933, |
|
"learning_rate": 2.699186991869919e-05, |
|
"loss": 1.2791, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.34025316455696203, |
|
"grad_norm": 1.217609595123036, |
|
"learning_rate": 2.731707317073171e-05, |
|
"loss": 1.3132, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.34430379746835443, |
|
"grad_norm": 1.8382600917892271, |
|
"learning_rate": 2.764227642276423e-05, |
|
"loss": 1.2862, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.34835443037974684, |
|
"grad_norm": 1.3215285337246088, |
|
"learning_rate": 2.796747967479675e-05, |
|
"loss": 1.2685, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.35240506329113924, |
|
"grad_norm": 1.9846372812135762, |
|
"learning_rate": 2.829268292682927e-05, |
|
"loss": 1.3023, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.35645569620253165, |
|
"grad_norm": 1.5283058641876313, |
|
"learning_rate": 2.8617886178861792e-05, |
|
"loss": 1.3161, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.36050632911392405, |
|
"grad_norm": 1.756734267285129, |
|
"learning_rate": 2.8943089430894314e-05, |
|
"loss": 1.3043, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.36455696202531646, |
|
"grad_norm": 1.5679608196674877, |
|
"learning_rate": 2.926829268292683e-05, |
|
"loss": 1.2516, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.36860759493670886, |
|
"grad_norm": 1.3923559238342174, |
|
"learning_rate": 2.959349593495935e-05, |
|
"loss": 1.2755, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.37265822784810126, |
|
"grad_norm": 1.4238896639021854, |
|
"learning_rate": 2.991869918699187e-05, |
|
"loss": 1.2774, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.37670886075949367, |
|
"grad_norm": 1.356362634598468, |
|
"learning_rate": 3.0243902439024392e-05, |
|
"loss": 1.2634, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.3807594936708861, |
|
"grad_norm": 1.3675453411632543, |
|
"learning_rate": 3.056910569105692e-05, |
|
"loss": 1.2822, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.3848101265822785, |
|
"grad_norm": 1.4194627065729042, |
|
"learning_rate": 3.089430894308943e-05, |
|
"loss": 1.2756, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.3888607594936709, |
|
"grad_norm": 1.274969732938223, |
|
"learning_rate": 3.1219512195121955e-05, |
|
"loss": 1.2357, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.3929113924050633, |
|
"grad_norm": 1.813267955079747, |
|
"learning_rate": 3.154471544715447e-05, |
|
"loss": 1.2664, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.3969620253164557, |
|
"grad_norm": 1.3676405906942297, |
|
"learning_rate": 3.186991869918699e-05, |
|
"loss": 1.2779, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.4010126582278481, |
|
"grad_norm": 1.8158323053910375, |
|
"learning_rate": 3.2195121951219514e-05, |
|
"loss": 1.2827, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.4050632911392405, |
|
"grad_norm": 2.328954218686755, |
|
"learning_rate": 3.2520325203252037e-05, |
|
"loss": 1.2792, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.4091139240506329, |
|
"grad_norm": 1.9064613924281169, |
|
"learning_rate": 3.284552845528456e-05, |
|
"loss": 1.2739, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.4131645569620253, |
|
"grad_norm": 2.596307309778557, |
|
"learning_rate": 3.3170731707317074e-05, |
|
"loss": 1.2727, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.4172151898734177, |
|
"grad_norm": 1.7984053796736303, |
|
"learning_rate": 3.3495934959349596e-05, |
|
"loss": 1.2756, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.4212658227848101, |
|
"grad_norm": 2.507185622845171, |
|
"learning_rate": 3.382113821138212e-05, |
|
"loss": 1.2923, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.4253164556962025, |
|
"grad_norm": 1.9203261035500043, |
|
"learning_rate": 3.414634146341463e-05, |
|
"loss": 1.2831, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.4293670886075949, |
|
"grad_norm": 2.3331822897940224, |
|
"learning_rate": 3.4471544715447155e-05, |
|
"loss": 1.2717, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.43341772151898733, |
|
"grad_norm": 1.8664773360493212, |
|
"learning_rate": 3.479674796747968e-05, |
|
"loss": 1.2626, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.43746835443037974, |
|
"grad_norm": 2.292086406818919, |
|
"learning_rate": 3.51219512195122e-05, |
|
"loss": 1.2847, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.44151898734177214, |
|
"grad_norm": 1.9186721185797075, |
|
"learning_rate": 3.544715447154472e-05, |
|
"loss": 1.2609, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.44556962025316454, |
|
"grad_norm": 2.1743759127244577, |
|
"learning_rate": 3.577235772357724e-05, |
|
"loss": 1.2284, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.44962025316455695, |
|
"grad_norm": 1.873004446473688, |
|
"learning_rate": 3.609756097560976e-05, |
|
"loss": 1.2544, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.45367088607594935, |
|
"grad_norm": 1.705936933825921, |
|
"learning_rate": 3.642276422764228e-05, |
|
"loss": 1.2678, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.45772151898734176, |
|
"grad_norm": 1.9874611748970357, |
|
"learning_rate": 3.67479674796748e-05, |
|
"loss": 1.265, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.46177215189873416, |
|
"grad_norm": 1.4192685631574702, |
|
"learning_rate": 3.7073170731707325e-05, |
|
"loss": 1.2549, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.46582278481012657, |
|
"grad_norm": 2.0386379577422122, |
|
"learning_rate": 3.739837398373984e-05, |
|
"loss": 1.28, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.46987341772151897, |
|
"grad_norm": 1.3850994392717229, |
|
"learning_rate": 3.772357723577236e-05, |
|
"loss": 1.2808, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.4739240506329114, |
|
"grad_norm": 1.7581988959901718, |
|
"learning_rate": 3.804878048780488e-05, |
|
"loss": 1.2713, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.4779746835443038, |
|
"grad_norm": 1.4756711967934037, |
|
"learning_rate": 3.83739837398374e-05, |
|
"loss": 1.2656, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.4820253164556962, |
|
"grad_norm": 1.6594867350878697, |
|
"learning_rate": 3.869918699186992e-05, |
|
"loss": 1.2441, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.4860759493670886, |
|
"grad_norm": 1.2931965504555782, |
|
"learning_rate": 3.9024390243902444e-05, |
|
"loss": 1.2627, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.490126582278481, |
|
"grad_norm": 1.4421681062077512, |
|
"learning_rate": 3.9349593495934966e-05, |
|
"loss": 1.2589, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.4941772151898734, |
|
"grad_norm": 1.3215358301769276, |
|
"learning_rate": 3.967479674796748e-05, |
|
"loss": 1.2422, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.4982278481012658, |
|
"grad_norm": 1.4033176184020035, |
|
"learning_rate": 4e-05, |
|
"loss": 1.2563, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.5022784810126583, |
|
"grad_norm": 1.2131679749893058, |
|
"learning_rate": 3.999991946137476e-05, |
|
"loss": 1.2392, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.5063291139240507, |
|
"grad_norm": 1.2803537920940653, |
|
"learning_rate": 3.999967784614766e-05, |
|
"loss": 1.2599, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.5103797468354431, |
|
"grad_norm": 1.597970235336043, |
|
"learning_rate": 3.9999275156264656e-05, |
|
"loss": 1.2392, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.5144303797468355, |
|
"grad_norm": 1.3492040119330886, |
|
"learning_rate": 3.999871139496895e-05, |
|
"loss": 1.2654, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.5184810126582279, |
|
"grad_norm": 1.076342084234448, |
|
"learning_rate": 3.9997986566800995e-05, |
|
"loss": 1.2511, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.5225316455696203, |
|
"grad_norm": 1.899813264157206, |
|
"learning_rate": 3.999710067759846e-05, |
|
"loss": 1.2675, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.5265822784810127, |
|
"grad_norm": 1.373827372090609, |
|
"learning_rate": 3.999605373449617e-05, |
|
"loss": 1.255, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.5306329113924051, |
|
"grad_norm": 2.719124532284048, |
|
"learning_rate": 3.9994845745926075e-05, |
|
"loss": 1.2804, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.5346835443037975, |
|
"grad_norm": 2.435344686205741, |
|
"learning_rate": 3.999347672161713e-05, |
|
"loss": 1.2751, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.5387341772151899, |
|
"grad_norm": 1.4879370516416515, |
|
"learning_rate": 3.999194667259528e-05, |
|
"loss": 1.2627, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.5427848101265823, |
|
"grad_norm": 1.34864506211106, |
|
"learning_rate": 3.999025561118334e-05, |
|
"loss": 1.238, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.5468354430379747, |
|
"grad_norm": 1.7047755779787932, |
|
"learning_rate": 3.998840355100086e-05, |
|
"loss": 1.2883, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.5508860759493671, |
|
"grad_norm": 1.2748363587545162, |
|
"learning_rate": 3.998639050696409e-05, |
|
"loss": 1.2785, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.5549367088607595, |
|
"grad_norm": 1.360745828233388, |
|
"learning_rate": 3.998421649528582e-05, |
|
"loss": 1.2438, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.5589873417721519, |
|
"grad_norm": 1.3488259518134602, |
|
"learning_rate": 3.9981881533475234e-05, |
|
"loss": 1.2827, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.5630379746835443, |
|
"grad_norm": 1.1972255305729358, |
|
"learning_rate": 3.997938564033779e-05, |
|
"loss": 1.2701, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.5670886075949367, |
|
"grad_norm": 1.2102473888114986, |
|
"learning_rate": 3.9976728835975064e-05, |
|
"loss": 1.2541, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.5711392405063291, |
|
"grad_norm": 1.1663169480091446, |
|
"learning_rate": 3.9973911141784605e-05, |
|
"loss": 1.2621, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.5751898734177215, |
|
"grad_norm": 1.4806161983909714, |
|
"learning_rate": 3.997093258045973e-05, |
|
"loss": 1.2593, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.579240506329114, |
|
"grad_norm": 1.164207043995882, |
|
"learning_rate": 3.996779317598936e-05, |
|
"loss": 1.276, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.5832911392405064, |
|
"grad_norm": 1.496751937640438, |
|
"learning_rate": 3.996449295365782e-05, |
|
"loss": 1.2905, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.5873417721518988, |
|
"grad_norm": 1.2440381877527282, |
|
"learning_rate": 3.996103194004467e-05, |
|
"loss": 1.2701, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.5913924050632912, |
|
"grad_norm": 1.4853429120691295, |
|
"learning_rate": 3.995741016302441e-05, |
|
"loss": 1.2764, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.5954430379746836, |
|
"grad_norm": 1.23447056784449, |
|
"learning_rate": 3.9953627651766364e-05, |
|
"loss": 1.2244, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.599493670886076, |
|
"grad_norm": 1.5012986890025253, |
|
"learning_rate": 3.9949684436734325e-05, |
|
"loss": 1.2537, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.6035443037974684, |
|
"grad_norm": 1.3900807200729093, |
|
"learning_rate": 3.994558054968643e-05, |
|
"loss": 1.2273, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.6075949367088608, |
|
"grad_norm": 1.0319429270350107, |
|
"learning_rate": 3.994131602367481e-05, |
|
"loss": 1.2336, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.6116455696202532, |
|
"grad_norm": 1.2890670709747514, |
|
"learning_rate": 3.9936890893045376e-05, |
|
"loss": 1.26, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.6156962025316456, |
|
"grad_norm": 0.9504177397537688, |
|
"learning_rate": 3.993230519343752e-05, |
|
"loss": 1.2896, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.619746835443038, |
|
"grad_norm": 1.2073125952710062, |
|
"learning_rate": 3.992755896178383e-05, |
|
"loss": 1.266, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.6237974683544304, |
|
"grad_norm": 1.2108206782251456, |
|
"learning_rate": 3.992265223630981e-05, |
|
"loss": 1.2812, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.6278481012658228, |
|
"grad_norm": 1.370421326496685, |
|
"learning_rate": 3.991758505653355e-05, |
|
"loss": 1.2793, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.6318987341772152, |
|
"grad_norm": 1.0617061947505173, |
|
"learning_rate": 3.991235746326543e-05, |
|
"loss": 1.2224, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.6359493670886076, |
|
"grad_norm": 0.9864746726362693, |
|
"learning_rate": 3.9906969498607745e-05, |
|
"loss": 1.2725, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 1.3867783865601402, |
|
"learning_rate": 3.990142120595444e-05, |
|
"loss": 1.271, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.6440506329113924, |
|
"grad_norm": 1.242400174400085, |
|
"learning_rate": 3.98957126299907e-05, |
|
"loss": 1.2688, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.6481012658227848, |
|
"grad_norm": 1.619620311598154, |
|
"learning_rate": 3.9889843816692596e-05, |
|
"loss": 1.2511, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.6521518987341772, |
|
"grad_norm": 1.4364484447137902, |
|
"learning_rate": 3.9883814813326766e-05, |
|
"loss": 1.249, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.6562025316455696, |
|
"grad_norm": 1.771055800949035, |
|
"learning_rate": 3.9877625668449956e-05, |
|
"loss": 1.238, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.660253164556962, |
|
"grad_norm": 1.3906554826582653, |
|
"learning_rate": 3.98712764319087e-05, |
|
"loss": 1.2396, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.6643037974683544, |
|
"grad_norm": 1.6066635144357058, |
|
"learning_rate": 3.9864767154838864e-05, |
|
"loss": 1.2528, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.6683544303797468, |
|
"grad_norm": 1.2070793666484902, |
|
"learning_rate": 3.9858097889665277e-05, |
|
"loss": 1.2808, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.6724050632911392, |
|
"grad_norm": 1.4549875949987627, |
|
"learning_rate": 3.985126869010129e-05, |
|
"loss": 1.2714, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.6764556962025317, |
|
"grad_norm": 1.141041753325671, |
|
"learning_rate": 3.984427961114833e-05, |
|
"loss": 1.2203, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.6805063291139241, |
|
"grad_norm": 0.9476671118042549, |
|
"learning_rate": 3.9837130709095475e-05, |
|
"loss": 1.2807, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.6845569620253165, |
|
"grad_norm": 1.3453793183108158, |
|
"learning_rate": 3.982982204151901e-05, |
|
"loss": 1.2677, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.6886075949367089, |
|
"grad_norm": 0.9933107203125915, |
|
"learning_rate": 3.982235366728193e-05, |
|
"loss": 1.2344, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.6926582278481013, |
|
"grad_norm": 1.348743937930853, |
|
"learning_rate": 3.9814725646533505e-05, |
|
"loss": 1.2579, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.6967088607594937, |
|
"grad_norm": 1.025320445251145, |
|
"learning_rate": 3.9806938040708746e-05, |
|
"loss": 1.2678, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.7007594936708861, |
|
"grad_norm": 1.5449402833336423, |
|
"learning_rate": 3.9798990912527976e-05, |
|
"loss": 1.2381, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.7048101265822785, |
|
"grad_norm": 1.1397546230689823, |
|
"learning_rate": 3.979088432599627e-05, |
|
"loss": 1.2707, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.7088607594936709, |
|
"grad_norm": 1.9800174349192665, |
|
"learning_rate": 3.9782618346402964e-05, |
|
"loss": 1.2749, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.7129113924050633, |
|
"grad_norm": 1.4654625468202886, |
|
"learning_rate": 3.977419304032111e-05, |
|
"loss": 1.2479, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.7169620253164557, |
|
"grad_norm": 1.5649948657250898, |
|
"learning_rate": 3.976560847560697e-05, |
|
"loss": 1.2452, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.7210126582278481, |
|
"grad_norm": 1.3706715679622867, |
|
"learning_rate": 3.9756864721399456e-05, |
|
"loss": 1.2522, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.7250632911392405, |
|
"grad_norm": 1.0407236047805566, |
|
"learning_rate": 3.974796184811956e-05, |
|
"loss": 1.2632, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.7291139240506329, |
|
"grad_norm": 1.9113746282620594, |
|
"learning_rate": 3.973889992746979e-05, |
|
"loss": 1.2509, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.7331645569620253, |
|
"grad_norm": 1.4033149511035359, |
|
"learning_rate": 3.972967903243361e-05, |
|
"loss": 1.2609, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.7372151898734177, |
|
"grad_norm": 1.807747226468799, |
|
"learning_rate": 3.972029923727486e-05, |
|
"loss": 1.2613, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.7412658227848101, |
|
"grad_norm": 1.5268291322133318, |
|
"learning_rate": 3.971076061753709e-05, |
|
"loss": 1.2703, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.7453164556962025, |
|
"grad_norm": 1.588719886394012, |
|
"learning_rate": 3.9701063250043066e-05, |
|
"loss": 1.242, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.7493670886075949, |
|
"grad_norm": 1.1958866572498055, |
|
"learning_rate": 3.969120721289402e-05, |
|
"loss": 1.2543, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.7534177215189873, |
|
"grad_norm": 1.3773625479004505, |
|
"learning_rate": 3.9681192585469146e-05, |
|
"loss": 1.2437, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.7574683544303797, |
|
"grad_norm": 1.5170765768483432, |
|
"learning_rate": 3.9671019448424865e-05, |
|
"loss": 1.2384, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.7615189873417721, |
|
"grad_norm": 1.1407162392852277, |
|
"learning_rate": 3.966068788369422e-05, |
|
"loss": 1.2485, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.7655696202531646, |
|
"grad_norm": 1.5386512048407477, |
|
"learning_rate": 3.965019797448622e-05, |
|
"loss": 1.2679, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.769620253164557, |
|
"grad_norm": 1.2251730690225693, |
|
"learning_rate": 3.963954980528515e-05, |
|
"loss": 1.2536, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.7736708860759494, |
|
"grad_norm": 1.408108724704713, |
|
"learning_rate": 3.9628743461849905e-05, |
|
"loss": 1.2397, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.7777215189873418, |
|
"grad_norm": 1.2197980084638553, |
|
"learning_rate": 3.961777903121329e-05, |
|
"loss": 1.2447, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.7817721518987342, |
|
"grad_norm": 1.2746670933719755, |
|
"learning_rate": 3.960665660168131e-05, |
|
"loss": 1.2293, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.7858227848101266, |
|
"grad_norm": 1.3512867233034471, |
|
"learning_rate": 3.9595376262832485e-05, |
|
"loss": 1.2557, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.789873417721519, |
|
"grad_norm": 1.1329461056672794, |
|
"learning_rate": 3.9583938105517127e-05, |
|
"loss": 1.254, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.7939240506329114, |
|
"grad_norm": 1.2668346122283785, |
|
"learning_rate": 3.957234222185657e-05, |
|
"loss": 1.2623, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.7979746835443038, |
|
"grad_norm": 1.2443396585478637, |
|
"learning_rate": 3.9560588705242474e-05, |
|
"loss": 1.2528, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.8020253164556962, |
|
"grad_norm": 0.9150808190583143, |
|
"learning_rate": 3.954867765033605e-05, |
|
"loss": 1.222, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.8060759493670886, |
|
"grad_norm": 1.4263164131533803, |
|
"learning_rate": 3.953660915306728e-05, |
|
"loss": 1.2688, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.810126582278481, |
|
"grad_norm": 0.8411367083420277, |
|
"learning_rate": 3.952438331063419e-05, |
|
"loss": 1.2579, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.8141772151898734, |
|
"grad_norm": 1.5906717000463564, |
|
"learning_rate": 3.951200022150205e-05, |
|
"loss": 1.2429, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.8182278481012658, |
|
"grad_norm": 0.9530431447291341, |
|
"learning_rate": 3.949945998540253e-05, |
|
"loss": 1.2553, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.8222784810126582, |
|
"grad_norm": 1.8358571422342145, |
|
"learning_rate": 3.9486762703332993e-05, |
|
"loss": 1.2358, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.8263291139240506, |
|
"grad_norm": 1.4585528468201707, |
|
"learning_rate": 3.947390847755559e-05, |
|
"loss": 1.2582, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.830379746835443, |
|
"grad_norm": 1.4275653096713663, |
|
"learning_rate": 3.946089741159648e-05, |
|
"loss": 1.2301, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.8344303797468354, |
|
"grad_norm": 1.3658094777637684, |
|
"learning_rate": 3.944772961024501e-05, |
|
"loss": 1.2337, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.8384810126582278, |
|
"grad_norm": 1.280353091067545, |
|
"learning_rate": 3.943440517955285e-05, |
|
"loss": 1.2316, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.8425316455696202, |
|
"grad_norm": 1.1405926514661329, |
|
"learning_rate": 3.9420924226833126e-05, |
|
"loss": 1.226, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.8465822784810126, |
|
"grad_norm": 1.1306669999517491, |
|
"learning_rate": 3.9407286860659566e-05, |
|
"loss": 1.2446, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.850632911392405, |
|
"grad_norm": 1.1016566466283921, |
|
"learning_rate": 3.9393493190865657e-05, |
|
"loss": 1.2381, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.8546835443037974, |
|
"grad_norm": 1.2199977346747586, |
|
"learning_rate": 3.937954332854371e-05, |
|
"loss": 1.2289, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.8587341772151899, |
|
"grad_norm": 1.091176952654132, |
|
"learning_rate": 3.9365437386044016e-05, |
|
"loss": 1.224, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.8627848101265823, |
|
"grad_norm": 1.2094906668588903, |
|
"learning_rate": 3.935117547697387e-05, |
|
"loss": 1.2343, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.8668354430379747, |
|
"grad_norm": 1.1052019513312572, |
|
"learning_rate": 3.933675771619675e-05, |
|
"loss": 1.254, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.8708860759493671, |
|
"grad_norm": 1.151212277526975, |
|
"learning_rate": 3.932218421983131e-05, |
|
"loss": 1.2299, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.8749367088607595, |
|
"grad_norm": 1.0187444896650277, |
|
"learning_rate": 3.9307455105250484e-05, |
|
"loss": 1.2198, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.8789873417721519, |
|
"grad_norm": 1.2024717094843038, |
|
"learning_rate": 3.929257049108054e-05, |
|
"loss": 1.2441, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.8830379746835443, |
|
"grad_norm": 1.015676239980724, |
|
"learning_rate": 3.927753049720011e-05, |
|
"loss": 1.2403, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.8870886075949367, |
|
"grad_norm": 1.040506184227416, |
|
"learning_rate": 3.9262335244739234e-05, |
|
"loss": 1.2335, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.8911392405063291, |
|
"grad_norm": 1.0732980215644987, |
|
"learning_rate": 3.92469848560784e-05, |
|
"loss": 1.2351, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.8951898734177215, |
|
"grad_norm": 1.3117768548399273, |
|
"learning_rate": 3.923147945484751e-05, |
|
"loss": 1.25, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.8992405063291139, |
|
"grad_norm": 1.0586212065587202, |
|
"learning_rate": 3.9215819165924956e-05, |
|
"loss": 1.2384, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.9032911392405063, |
|
"grad_norm": 1.2378747791210107, |
|
"learning_rate": 3.920000411543654e-05, |
|
"loss": 1.2466, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.9073417721518987, |
|
"grad_norm": 0.9384588852195727, |
|
"learning_rate": 3.9184034430754495e-05, |
|
"loss": 1.2305, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.9113924050632911, |
|
"grad_norm": 1.0404840108399178, |
|
"learning_rate": 3.916791024049648e-05, |
|
"loss": 1.2446, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.9154430379746835, |
|
"grad_norm": 0.9971829751292264, |
|
"learning_rate": 3.91516316745245e-05, |
|
"loss": 1.2365, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.9194936708860759, |
|
"grad_norm": 1.0817489648840979, |
|
"learning_rate": 3.913519886394389e-05, |
|
"loss": 1.2292, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.9235443037974683, |
|
"grad_norm": 0.9593454419239398, |
|
"learning_rate": 3.911861194110225e-05, |
|
"loss": 1.2326, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.9275949367088607, |
|
"grad_norm": 0.9640531283593995, |
|
"learning_rate": 3.910187103958837e-05, |
|
"loss": 1.2556, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.9316455696202531, |
|
"grad_norm": 0.8540057120715031, |
|
"learning_rate": 3.908497629423117e-05, |
|
"loss": 1.253, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.9356962025316455, |
|
"grad_norm": 1.0964404624563135, |
|
"learning_rate": 3.9067927841098614e-05, |
|
"loss": 1.2341, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.9397468354430379, |
|
"grad_norm": 0.7998900875727888, |
|
"learning_rate": 3.9050725817496594e-05, |
|
"loss": 1.2362, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.9437974683544303, |
|
"grad_norm": 1.1273197844834193, |
|
"learning_rate": 3.9033370361967844e-05, |
|
"loss": 1.2618, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.9478481012658228, |
|
"grad_norm": 0.677859781960143, |
|
"learning_rate": 3.901586161429081e-05, |
|
"loss": 1.2647, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.9518987341772152, |
|
"grad_norm": 1.0674003272288481, |
|
"learning_rate": 3.8998199715478545e-05, |
|
"loss": 1.2466, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.9559493670886076, |
|
"grad_norm": 0.7962551984225934, |
|
"learning_rate": 3.8980384807777564e-05, |
|
"loss": 1.2417, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.7064423932237379, |
|
"learning_rate": 3.896241703466667e-05, |
|
"loss": 1.2369, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.9640506329113924, |
|
"grad_norm": 0.8933875773364179, |
|
"learning_rate": 3.894429654085585e-05, |
|
"loss": 1.2328, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.9681012658227848, |
|
"grad_norm": 0.8826540039267996, |
|
"learning_rate": 3.892602347228505e-05, |
|
"loss": 1.2429, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.9721518987341772, |
|
"grad_norm": 0.6704718607742554, |
|
"learning_rate": 3.890759797612307e-05, |
|
"loss": 1.2478, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.9762025316455696, |
|
"grad_norm": 0.793481933029087, |
|
"learning_rate": 3.888902020076632e-05, |
|
"loss": 1.2312, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.980253164556962, |
|
"grad_norm": 0.8067636039561972, |
|
"learning_rate": 3.887029029583764e-05, |
|
"loss": 1.2203, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.9843037974683544, |
|
"grad_norm": 0.8264781583647357, |
|
"learning_rate": 3.8851408412185125e-05, |
|
"loss": 1.2327, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.9883544303797468, |
|
"grad_norm": 0.6661350310643096, |
|
"learning_rate": 3.8832374701880855e-05, |
|
"loss": 1.2318, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.9924050632911392, |
|
"grad_norm": 0.7734851936871938, |
|
"learning_rate": 3.881318931821972e-05, |
|
"loss": 1.2221, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.9964556962025316, |
|
"grad_norm": 0.7214510416742959, |
|
"learning_rate": 3.879385241571817e-05, |
|
"loss": 1.2407, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 1.000506329113924, |
|
"grad_norm": 0.7423834707176196, |
|
"learning_rate": 3.8774364150112955e-05, |
|
"loss": 1.2245, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 1.0045569620253165, |
|
"grad_norm": 0.8383601022919843, |
|
"learning_rate": 3.8754724678359884e-05, |
|
"loss": 1.1686, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 1.0086075949367088, |
|
"grad_norm": 0.7752393811446099, |
|
"learning_rate": 3.873493415863256e-05, |
|
"loss": 1.1787, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 1.0126582278481013, |
|
"grad_norm": 1.0555626500487545, |
|
"learning_rate": 3.871499275032111e-05, |
|
"loss": 1.1553, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 1.0167088607594936, |
|
"grad_norm": 0.8644750555468655, |
|
"learning_rate": 3.869490061403091e-05, |
|
"loss": 1.1663, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 1.0207594936708861, |
|
"grad_norm": 0.7903490707195843, |
|
"learning_rate": 3.867465791158124e-05, |
|
"loss": 1.1381, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 1.0248101265822784, |
|
"grad_norm": 0.9052862072140597, |
|
"learning_rate": 3.865426480600407e-05, |
|
"loss": 1.1349, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 1.028860759493671, |
|
"grad_norm": 1.0553629610990418, |
|
"learning_rate": 3.863372146154264e-05, |
|
"loss": 1.1584, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 1.0329113924050632, |
|
"grad_norm": 1.0378925889386388, |
|
"learning_rate": 3.861302804365024e-05, |
|
"loss": 1.1607, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 1.0369620253164558, |
|
"grad_norm": 0.8873461532847762, |
|
"learning_rate": 3.85921847189888e-05, |
|
"loss": 1.1783, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 1.041012658227848, |
|
"grad_norm": 0.9147625205012248, |
|
"learning_rate": 3.85711916554276e-05, |
|
"loss": 1.1833, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 1.0450632911392406, |
|
"grad_norm": 0.8773054664826107, |
|
"learning_rate": 3.85500490220419e-05, |
|
"loss": 1.143, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 1.0491139240506329, |
|
"grad_norm": 0.983855951569538, |
|
"learning_rate": 3.852875698911154e-05, |
|
"loss": 1.1842, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 1.0531645569620254, |
|
"grad_norm": 1.0066064367608125, |
|
"learning_rate": 3.850731572811963e-05, |
|
"loss": 1.1526, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 1.0572151898734177, |
|
"grad_norm": 1.030816302493887, |
|
"learning_rate": 3.848572541175116e-05, |
|
"loss": 1.1598, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 1.0612658227848102, |
|
"grad_norm": 0.6185350800183042, |
|
"learning_rate": 3.846398621389154e-05, |
|
"loss": 1.1376, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 1.0653164556962025, |
|
"grad_norm": 0.784175503442257, |
|
"learning_rate": 3.84420983096253e-05, |
|
"loss": 1.1713, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 1.069367088607595, |
|
"grad_norm": 0.7244663857327224, |
|
"learning_rate": 3.8420061875234606e-05, |
|
"loss": 1.1349, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 1.0734177215189873, |
|
"grad_norm": 0.7027298354232484, |
|
"learning_rate": 3.839787708819787e-05, |
|
"loss": 1.185, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 1.0774683544303798, |
|
"grad_norm": 0.7696537298890939, |
|
"learning_rate": 3.8375544127188325e-05, |
|
"loss": 1.1591, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 1.081518987341772, |
|
"grad_norm": 0.7237553367385098, |
|
"learning_rate": 3.8353063172072564e-05, |
|
"loss": 1.1515, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 1.0855696202531646, |
|
"grad_norm": 0.7911600084883476, |
|
"learning_rate": 3.8330434403909105e-05, |
|
"loss": 1.1705, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 1.089620253164557, |
|
"grad_norm": 0.979499300176695, |
|
"learning_rate": 3.8307658004946934e-05, |
|
"loss": 1.1479, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 1.0936708860759494, |
|
"grad_norm": 0.9091071735595879, |
|
"learning_rate": 3.8284734158624046e-05, |
|
"loss": 1.1391, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 1.0977215189873417, |
|
"grad_norm": 0.7947885444196131, |
|
"learning_rate": 3.826166304956594e-05, |
|
"loss": 1.1301, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 1.1017721518987342, |
|
"grad_norm": 0.762571569929015, |
|
"learning_rate": 3.8238444863584164e-05, |
|
"loss": 1.136, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 1.1058227848101265, |
|
"grad_norm": 0.6959469248034448, |
|
"learning_rate": 3.821507978767479e-05, |
|
"loss": 1.1352, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 1.109873417721519, |
|
"grad_norm": 0.6016922739401732, |
|
"learning_rate": 3.819156801001693e-05, |
|
"loss": 1.148, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 1.1139240506329113, |
|
"grad_norm": 0.6845312656594692, |
|
"learning_rate": 3.816790971997121e-05, |
|
"loss": 1.1539, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 1.1179746835443038, |
|
"grad_norm": 0.6192567802478047, |
|
"learning_rate": 3.8144105108078246e-05, |
|
"loss": 1.1717, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 1.1220253164556961, |
|
"grad_norm": 0.6244099682043345, |
|
"learning_rate": 3.81201543660571e-05, |
|
"loss": 1.1468, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 1.1260759493670887, |
|
"grad_norm": 0.781393599265035, |
|
"learning_rate": 3.809605768680377e-05, |
|
"loss": 1.1421, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 1.130126582278481, |
|
"grad_norm": 0.8145652656258217, |
|
"learning_rate": 3.807181526438958e-05, |
|
"loss": 1.1401, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 1.1341772151898735, |
|
"grad_norm": 0.752360146007145, |
|
"learning_rate": 3.8047427294059697e-05, |
|
"loss": 1.1634, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 1.1382278481012658, |
|
"grad_norm": 0.8436105745370066, |
|
"learning_rate": 3.802289397223145e-05, |
|
"loss": 1.1427, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 1.1422784810126583, |
|
"grad_norm": 0.821123052171786, |
|
"learning_rate": 3.7998215496492854e-05, |
|
"loss": 1.1388, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 1.1463291139240506, |
|
"grad_norm": 0.8433498945167828, |
|
"learning_rate": 3.797339206560096e-05, |
|
"loss": 1.1552, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 1.150379746835443, |
|
"grad_norm": 0.8131224800162512, |
|
"learning_rate": 3.794842387948027e-05, |
|
"loss": 1.1682, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 1.1544303797468354, |
|
"grad_norm": 0.8852924842055218, |
|
"learning_rate": 3.7923311139221114e-05, |
|
"loss": 1.1368, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 1.158481012658228, |
|
"grad_norm": 0.7732441498199645, |
|
"learning_rate": 3.7898054047078054e-05, |
|
"loss": 1.1705, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 1.1625316455696202, |
|
"grad_norm": 0.6668429237987028, |
|
"learning_rate": 3.787265280646825e-05, |
|
"loss": 1.1425, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 1.1665822784810127, |
|
"grad_norm": 0.732408955517127, |
|
"learning_rate": 3.7847107621969786e-05, |
|
"loss": 1.1504, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 1.170632911392405, |
|
"grad_norm": 0.7528788012879877, |
|
"learning_rate": 3.7821418699320064e-05, |
|
"loss": 1.1236, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 1.1746835443037975, |
|
"grad_norm": 0.6365757616045409, |
|
"learning_rate": 3.7795586245414145e-05, |
|
"loss": 1.1255, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 1.1787341772151898, |
|
"grad_norm": 0.8669354635089168, |
|
"learning_rate": 3.776961046830306e-05, |
|
"loss": 1.1472, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 1.1827848101265823, |
|
"grad_norm": 0.810762853691914, |
|
"learning_rate": 3.774349157719215e-05, |
|
"loss": 1.1671, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 1.1868354430379746, |
|
"grad_norm": 0.8215539623348017, |
|
"learning_rate": 3.7717229782439365e-05, |
|
"loss": 1.1496, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 1.1908860759493671, |
|
"grad_norm": 1.001540768276704, |
|
"learning_rate": 3.769082529555359e-05, |
|
"loss": 1.1196, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 1.1949367088607594, |
|
"grad_norm": 0.8607157287008111, |
|
"learning_rate": 3.766427832919294e-05, |
|
"loss": 1.1462, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 1.198987341772152, |
|
"grad_norm": 0.8283940748533274, |
|
"learning_rate": 3.7637589097163024e-05, |
|
"loss": 1.1633, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 1.2030379746835442, |
|
"grad_norm": 0.798549350225124, |
|
"learning_rate": 3.761075781441526e-05, |
|
"loss": 1.1463, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 1.2070886075949367, |
|
"grad_norm": 1.1481258101752376, |
|
"learning_rate": 3.75837846970451e-05, |
|
"loss": 1.1495, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 1.211139240506329, |
|
"grad_norm": 0.883973923902975, |
|
"learning_rate": 3.755666996229032e-05, |
|
"loss": 1.1623, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 1.2151898734177216, |
|
"grad_norm": 0.6879847118579288, |
|
"learning_rate": 3.752941382852927e-05, |
|
"loss": 1.136, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 1.2192405063291138, |
|
"grad_norm": 0.8637709924391279, |
|
"learning_rate": 3.7502016515279115e-05, |
|
"loss": 1.1475, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 1.2232911392405064, |
|
"grad_norm": 0.6702391522578585, |
|
"learning_rate": 3.7474478243194043e-05, |
|
"loss": 1.1578, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 1.2273417721518987, |
|
"grad_norm": 0.756186949373457, |
|
"learning_rate": 3.744679923406351e-05, |
|
"loss": 1.1916, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 1.2313924050632912, |
|
"grad_norm": 0.7223244530105629, |
|
"learning_rate": 3.741897971081043e-05, |
|
"loss": 1.1683, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 1.2354430379746835, |
|
"grad_norm": 0.6231641676280993, |
|
"learning_rate": 3.739101989748946e-05, |
|
"loss": 1.1566, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 1.239493670886076, |
|
"grad_norm": 0.6955808201773662, |
|
"learning_rate": 3.7362920019285066e-05, |
|
"loss": 1.1796, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 1.2435443037974683, |
|
"grad_norm": 0.7012468832695, |
|
"learning_rate": 3.73346803025098e-05, |
|
"loss": 1.1681, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 1.2475949367088608, |
|
"grad_norm": 0.63016610156459, |
|
"learning_rate": 3.730630097460247e-05, |
|
"loss": 1.1667, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 1.251645569620253, |
|
"grad_norm": 0.7214578159176559, |
|
"learning_rate": 3.727778226412628e-05, |
|
"loss": 1.165, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 1.2556962025316456, |
|
"grad_norm": 0.627002991751359, |
|
"learning_rate": 3.7249124400767006e-05, |
|
"loss": 1.1635, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 1.259746835443038, |
|
"grad_norm": 0.6697395212411436, |
|
"learning_rate": 3.722032761533114e-05, |
|
"loss": 1.1859, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 1.2637974683544304, |
|
"grad_norm": 0.7000380529685968, |
|
"learning_rate": 3.719139213974403e-05, |
|
"loss": 1.1387, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 1.2678481012658227, |
|
"grad_norm": 0.6473325779455186, |
|
"learning_rate": 3.7162318207048006e-05, |
|
"loss": 1.1262, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 1.2718987341772152, |
|
"grad_norm": 0.6456559775390991, |
|
"learning_rate": 3.713310605140055e-05, |
|
"loss": 1.1448, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 1.2759493670886077, |
|
"grad_norm": 0.6217278712396029, |
|
"learning_rate": 3.710375590807233e-05, |
|
"loss": 1.1608, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.6016368996868794, |
|
"learning_rate": 3.7074268013445365e-05, |
|
"loss": 1.1826, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 1.2840506329113923, |
|
"grad_norm": 0.6784845387964498, |
|
"learning_rate": 3.7044642605011114e-05, |
|
"loss": 1.1616, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 1.2881012658227848, |
|
"grad_norm": 0.649227468182824, |
|
"learning_rate": 3.701487992136854e-05, |
|
"loss": 1.1251, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 1.2921518987341774, |
|
"grad_norm": 0.5937696910287711, |
|
"learning_rate": 3.69849802022222e-05, |
|
"loss": 1.1463, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 1.2962025316455696, |
|
"grad_norm": 0.5829389009078099, |
|
"learning_rate": 3.6954943688380334e-05, |
|
"loss": 1.1536, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 1.300253164556962, |
|
"grad_norm": 0.7121087361584678, |
|
"learning_rate": 3.692477062175289e-05, |
|
"loss": 1.1565, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 1.3043037974683545, |
|
"grad_norm": 0.5848203978143939, |
|
"learning_rate": 3.689446124534958e-05, |
|
"loss": 1.1528, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 1.308354430379747, |
|
"grad_norm": 0.7990113872738007, |
|
"learning_rate": 3.686401580327799e-05, |
|
"loss": 1.1668, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 1.3124050632911393, |
|
"grad_norm": 0.6615969360285675, |
|
"learning_rate": 3.683343454074149e-05, |
|
"loss": 1.1789, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 1.3164556962025316, |
|
"grad_norm": 0.740429977462607, |
|
"learning_rate": 3.6802717704037386e-05, |
|
"loss": 1.1588, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 1.320506329113924, |
|
"grad_norm": 0.7712169641916816, |
|
"learning_rate": 3.6771865540554855e-05, |
|
"loss": 1.1601, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 1.3245569620253166, |
|
"grad_norm": 0.6623743607871191, |
|
"learning_rate": 3.674087829877297e-05, |
|
"loss": 1.1436, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 1.3286075949367089, |
|
"grad_norm": 0.7112983437937824, |
|
"learning_rate": 3.6709756228258735e-05, |
|
"loss": 1.1526, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 1.3326582278481012, |
|
"grad_norm": 0.6054599920574315, |
|
"learning_rate": 3.667849957966501e-05, |
|
"loss": 1.1514, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 1.3367088607594937, |
|
"grad_norm": 0.5898683468456464, |
|
"learning_rate": 3.6647108604728546e-05, |
|
"loss": 1.1174, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 1.3407594936708862, |
|
"grad_norm": 0.592255716063486, |
|
"learning_rate": 3.661558355626795e-05, |
|
"loss": 1.1712, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 1.3448101265822785, |
|
"grad_norm": 0.759368211257049, |
|
"learning_rate": 3.658392468818163e-05, |
|
"loss": 1.145, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 1.3488607594936708, |
|
"grad_norm": 0.9020925843152267, |
|
"learning_rate": 3.655213225544574e-05, |
|
"loss": 1.1578, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 1.3529113924050633, |
|
"grad_norm": 0.9938801759919645, |
|
"learning_rate": 3.652020651411218e-05, |
|
"loss": 1.1487, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 1.3569620253164558, |
|
"grad_norm": 0.6822022172751128, |
|
"learning_rate": 3.6488147721306474e-05, |
|
"loss": 1.1654, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 1.3610126582278481, |
|
"grad_norm": 0.9960011796091998, |
|
"learning_rate": 3.645595613522574e-05, |
|
"loss": 1.1788, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 1.3650632911392404, |
|
"grad_norm": 0.9141639339449058, |
|
"learning_rate": 3.642363201513657e-05, |
|
"loss": 1.167, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 1.369113924050633, |
|
"grad_norm": 0.6811586836150645, |
|
"learning_rate": 3.6391175621373006e-05, |
|
"loss": 1.156, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 1.3731645569620254, |
|
"grad_norm": 0.7817160648787812, |
|
"learning_rate": 3.6358587215334355e-05, |
|
"loss": 1.169, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 1.3772151898734177, |
|
"grad_norm": 0.6482912094523019, |
|
"learning_rate": 3.632586705948318e-05, |
|
"loss": 1.1469, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 1.38126582278481, |
|
"grad_norm": 0.7357875186731013, |
|
"learning_rate": 3.629301541734311e-05, |
|
"loss": 1.1628, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 1.3853164556962025, |
|
"grad_norm": 0.6620856393585671, |
|
"learning_rate": 3.626003255349676e-05, |
|
"loss": 1.1778, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 1.389367088607595, |
|
"grad_norm": 0.7643591924645242, |
|
"learning_rate": 3.622691873358357e-05, |
|
"loss": 1.1422, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 1.3934177215189874, |
|
"grad_norm": 0.6753964890285318, |
|
"learning_rate": 3.61936742242977e-05, |
|
"loss": 1.1869, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 1.3974683544303796, |
|
"grad_norm": 0.6687875402040061, |
|
"learning_rate": 3.6160299293385864e-05, |
|
"loss": 1.1595, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 1.4015189873417722, |
|
"grad_norm": 0.6807292748902014, |
|
"learning_rate": 3.612679420964516e-05, |
|
"loss": 1.1661, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 1.4055696202531647, |
|
"grad_norm": 0.6674172603846734, |
|
"learning_rate": 3.609315924292092e-05, |
|
"loss": 1.1652, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 1.409620253164557, |
|
"grad_norm": 0.6689268597921838, |
|
"learning_rate": 3.6059394664104554e-05, |
|
"loss": 1.1659, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 1.4136708860759493, |
|
"grad_norm": 0.6746394490078387, |
|
"learning_rate": 3.602550074513133e-05, |
|
"loss": 1.1515, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 1.4177215189873418, |
|
"grad_norm": 0.7444518127676196, |
|
"learning_rate": 3.599147775897822e-05, |
|
"loss": 1.1447, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 1.4217721518987343, |
|
"grad_norm": 0.6535355941505684, |
|
"learning_rate": 3.595732597966167e-05, |
|
"loss": 1.171, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 1.4258227848101266, |
|
"grad_norm": 0.6101329784272493, |
|
"learning_rate": 3.592304568223542e-05, |
|
"loss": 1.1597, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 1.4298734177215189, |
|
"grad_norm": 0.7482485283544967, |
|
"learning_rate": 3.588863714278826e-05, |
|
"loss": 1.1436, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 1.4339240506329114, |
|
"grad_norm": 0.647677565696097, |
|
"learning_rate": 3.585410063844186e-05, |
|
"loss": 1.1655, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 1.437974683544304, |
|
"grad_norm": 0.7440958911067717, |
|
"learning_rate": 3.581943644734846e-05, |
|
"loss": 1.1629, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 1.4420253164556962, |
|
"grad_norm": 0.7960136581017142, |
|
"learning_rate": 3.578464484868869e-05, |
|
"loss": 1.1367, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 1.4460759493670885, |
|
"grad_norm": 0.8331818708033708, |
|
"learning_rate": 3.5749726122669316e-05, |
|
"loss": 1.1491, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 1.450126582278481, |
|
"grad_norm": 0.7053542335276067, |
|
"learning_rate": 3.5714680550520943e-05, |
|
"loss": 1.1682, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 1.4541772151898735, |
|
"grad_norm": 0.7205034913380446, |
|
"learning_rate": 3.5679508414495794e-05, |
|
"loss": 1.1618, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 1.4582278481012658, |
|
"grad_norm": 0.8627257144488724, |
|
"learning_rate": 3.564420999786543e-05, |
|
"loss": 1.1577, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 1.4622784810126581, |
|
"grad_norm": 0.551923279327445, |
|
"learning_rate": 3.560878558491842e-05, |
|
"loss": 1.1377, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 1.4663291139240506, |
|
"grad_norm": 0.747886445257357, |
|
"learning_rate": 3.5573235460958145e-05, |
|
"loss": 1.1538, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 1.4703797468354431, |
|
"grad_norm": 0.8239461665521868, |
|
"learning_rate": 3.553755991230039e-05, |
|
"loss": 1.184, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 1.4744303797468354, |
|
"grad_norm": 0.7705707398326068, |
|
"learning_rate": 3.5501759226271144e-05, |
|
"loss": 1.143, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 1.4784810126582277, |
|
"grad_norm": 0.8342625943001156, |
|
"learning_rate": 3.546583369120419e-05, |
|
"loss": 1.1698, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 1.4825316455696202, |
|
"grad_norm": 0.7300540770021413, |
|
"learning_rate": 3.5429783596438864e-05, |
|
"loss": 1.1617, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 1.4865822784810128, |
|
"grad_norm": 0.8955477162323741, |
|
"learning_rate": 3.539360923231766e-05, |
|
"loss": 1.2018, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 1.490632911392405, |
|
"grad_norm": 0.5913027821699496, |
|
"learning_rate": 3.535731089018394e-05, |
|
"loss": 1.1564, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 1.4946835443037974, |
|
"grad_norm": 0.7539256346580357, |
|
"learning_rate": 3.532088886237956e-05, |
|
"loss": 1.1811, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 1.4987341772151899, |
|
"grad_norm": 0.8496773341867427, |
|
"learning_rate": 3.528434344224253e-05, |
|
"loss": 1.1533, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 1.5027848101265824, |
|
"grad_norm": 0.5760506640810407, |
|
"learning_rate": 3.524767492410464e-05, |
|
"loss": 1.1699, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 1.5068354430379747, |
|
"grad_norm": 0.5646870504067393, |
|
"learning_rate": 3.521088360328908e-05, |
|
"loss": 1.165, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 1.510886075949367, |
|
"grad_norm": 0.7525565458584117, |
|
"learning_rate": 3.517396977610811e-05, |
|
"loss": 1.1641, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 1.5149367088607595, |
|
"grad_norm": 0.7090679376936387, |
|
"learning_rate": 3.5136933739860595e-05, |
|
"loss": 1.1684, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 1.518987341772152, |
|
"grad_norm": 0.5799881797883643, |
|
"learning_rate": 3.509977579282971e-05, |
|
"loss": 1.1422, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 1.5230379746835443, |
|
"grad_norm": 0.7007658258573316, |
|
"learning_rate": 3.5062496234280424e-05, |
|
"loss": 1.1718, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 1.5270886075949366, |
|
"grad_norm": 0.6839958246819896, |
|
"learning_rate": 3.502509536445719e-05, |
|
"loss": 1.1646, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 1.531139240506329, |
|
"grad_norm": 0.5825558954705621, |
|
"learning_rate": 3.498757348458147e-05, |
|
"loss": 1.1616, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 1.5351898734177216, |
|
"grad_norm": 0.732261656996349, |
|
"learning_rate": 3.4949930896849324e-05, |
|
"loss": 1.1434, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 1.539240506329114, |
|
"grad_norm": 0.760095262171451, |
|
"learning_rate": 3.491216790442899e-05, |
|
"loss": 1.1722, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.5432911392405062, |
|
"grad_norm": 0.5210055430159378, |
|
"learning_rate": 3.487428481145839e-05, |
|
"loss": 1.1589, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 1.5473417721518987, |
|
"grad_norm": 0.7475749196188591, |
|
"learning_rate": 3.483628192304278e-05, |
|
"loss": 1.1673, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 1.5513924050632912, |
|
"grad_norm": 0.6594452212366566, |
|
"learning_rate": 3.479815954525219e-05, |
|
"loss": 1.1779, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 1.5554430379746835, |
|
"grad_norm": 0.5935043631965298, |
|
"learning_rate": 3.475991798511899e-05, |
|
"loss": 1.1674, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 1.5594936708860758, |
|
"grad_norm": 0.6992857478493116, |
|
"learning_rate": 3.4721557550635464e-05, |
|
"loss": 1.1961, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 1.5635443037974683, |
|
"grad_norm": 0.6063318005172927, |
|
"learning_rate": 3.468307855075128e-05, |
|
"loss": 1.1789, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 1.5675949367088609, |
|
"grad_norm": 0.6363131764750006, |
|
"learning_rate": 3.4644481295371005e-05, |
|
"loss": 1.1618, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 1.5716455696202531, |
|
"grad_norm": 0.5291270432875703, |
|
"learning_rate": 3.460576609535163e-05, |
|
"loss": 1.1477, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 1.5756962025316454, |
|
"grad_norm": 0.5473029937255857, |
|
"learning_rate": 3.456693326250006e-05, |
|
"loss": 1.1528, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 1.579746835443038, |
|
"grad_norm": 0.5804619624960515, |
|
"learning_rate": 3.452798310957058e-05, |
|
"loss": 1.1654, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.5837974683544305, |
|
"grad_norm": 0.5345193640376179, |
|
"learning_rate": 3.4488915950262386e-05, |
|
"loss": 1.1668, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 1.5878481012658228, |
|
"grad_norm": 0.5321669903077104, |
|
"learning_rate": 3.4449732099216985e-05, |
|
"loss": 1.1626, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 1.591898734177215, |
|
"grad_norm": 0.5700661840884022, |
|
"learning_rate": 3.441043187201574e-05, |
|
"loss": 1.138, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 1.5959493670886076, |
|
"grad_norm": 0.4950685734713498, |
|
"learning_rate": 3.437101558517728e-05, |
|
"loss": 1.1582, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 0.5533645002535346, |
|
"learning_rate": 3.433148355615496e-05, |
|
"loss": 1.1546, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 1.6040506329113924, |
|
"grad_norm": 0.5246454919619364, |
|
"learning_rate": 3.4291836103334294e-05, |
|
"loss": 1.1556, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 1.6081012658227847, |
|
"grad_norm": 0.6980326018027233, |
|
"learning_rate": 3.425207354603043e-05, |
|
"loss": 1.1646, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 1.6121518987341772, |
|
"grad_norm": 0.7159884361474195, |
|
"learning_rate": 3.421219620448553e-05, |
|
"loss": 1.1333, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 1.6162025316455697, |
|
"grad_norm": 0.6856935533848656, |
|
"learning_rate": 3.417220439986623e-05, |
|
"loss": 1.1462, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 1.620253164556962, |
|
"grad_norm": 0.6150648957622421, |
|
"learning_rate": 3.4132098454261024e-05, |
|
"loss": 1.1592, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.6243037974683543, |
|
"grad_norm": 0.6976201592051443, |
|
"learning_rate": 3.4091878690677676e-05, |
|
"loss": 1.1626, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 1.6283544303797468, |
|
"grad_norm": 0.672083586558063, |
|
"learning_rate": 3.405154543304065e-05, |
|
"loss": 1.1803, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 1.6324050632911393, |
|
"grad_norm": 0.6440889804825302, |
|
"learning_rate": 3.401109900618843e-05, |
|
"loss": 1.155, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 1.6364556962025316, |
|
"grad_norm": 0.6128962245701063, |
|
"learning_rate": 3.3970539735870996e-05, |
|
"loss": 1.1503, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 1.640506329113924, |
|
"grad_norm": 0.6878083984717698, |
|
"learning_rate": 3.392986794874714e-05, |
|
"loss": 1.1619, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.6445569620253164, |
|
"grad_norm": 0.6018751128944752, |
|
"learning_rate": 3.388908397238184e-05, |
|
"loss": 1.1764, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 1.648607594936709, |
|
"grad_norm": 0.5943705225984935, |
|
"learning_rate": 3.384818813524362e-05, |
|
"loss": 1.1462, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 1.6526582278481012, |
|
"grad_norm": 0.808146214942223, |
|
"learning_rate": 3.380718076670195e-05, |
|
"loss": 1.1732, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 1.6567088607594935, |
|
"grad_norm": 0.6218398479232518, |
|
"learning_rate": 3.376606219702454e-05, |
|
"loss": 1.1505, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 1.660759493670886, |
|
"grad_norm": 0.6361800664297104, |
|
"learning_rate": 3.372483275737468e-05, |
|
"loss": 1.1577, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.6648101265822786, |
|
"grad_norm": 0.5346081235658657, |
|
"learning_rate": 3.368349277980861e-05, |
|
"loss": 1.1494, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 1.6688607594936709, |
|
"grad_norm": 0.7240825366635217, |
|
"learning_rate": 3.3642042597272844e-05, |
|
"loss": 1.145, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 1.6729113924050631, |
|
"grad_norm": 0.5233574689181881, |
|
"learning_rate": 3.360048254360144e-05, |
|
"loss": 1.1373, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 1.6769620253164557, |
|
"grad_norm": 0.7205979206799599, |
|
"learning_rate": 3.355881295351336e-05, |
|
"loss": 1.1864, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 1.6810126582278482, |
|
"grad_norm": 0.7731753649396087, |
|
"learning_rate": 3.351703416260975e-05, |
|
"loss": 1.1565, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 1.6850632911392405, |
|
"grad_norm": 0.6006782603286414, |
|
"learning_rate": 3.347514650737126e-05, |
|
"loss": 1.1406, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 1.6891139240506328, |
|
"grad_norm": 0.6744408915835239, |
|
"learning_rate": 3.3433150325155295e-05, |
|
"loss": 1.1336, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 1.6931645569620253, |
|
"grad_norm": 0.6705273123156373, |
|
"learning_rate": 3.339104595419334e-05, |
|
"loss": 1.1438, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 1.6972151898734178, |
|
"grad_norm": 0.6134641473880076, |
|
"learning_rate": 3.3348833733588204e-05, |
|
"loss": 1.193, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 1.70126582278481, |
|
"grad_norm": 0.6936392964253701, |
|
"learning_rate": 3.3306514003311305e-05, |
|
"loss": 1.1424, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.7053164556962024, |
|
"grad_norm": 0.5294858735781733, |
|
"learning_rate": 3.326408710419996e-05, |
|
"loss": 1.1525, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 1.709367088607595, |
|
"grad_norm": 0.6365635284280515, |
|
"learning_rate": 3.322155337795454e-05, |
|
"loss": 1.1514, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 1.7134177215189874, |
|
"grad_norm": 0.619519354522933, |
|
"learning_rate": 3.317891316713587e-05, |
|
"loss": 1.1525, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 1.7174683544303797, |
|
"grad_norm": 0.5916238717661698, |
|
"learning_rate": 3.313616681516231e-05, |
|
"loss": 1.1851, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 1.721518987341772, |
|
"grad_norm": 0.6651132141169918, |
|
"learning_rate": 3.309331466630713e-05, |
|
"loss": 1.1257, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 1.7255696202531645, |
|
"grad_norm": 0.7696282910433754, |
|
"learning_rate": 3.305035706569563e-05, |
|
"loss": 1.1363, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 1.729620253164557, |
|
"grad_norm": 0.6742260594846335, |
|
"learning_rate": 3.3007294359302433e-05, |
|
"loss": 1.1505, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 1.7336708860759493, |
|
"grad_norm": 0.6764110275957895, |
|
"learning_rate": 3.296412689394864e-05, |
|
"loss": 1.1575, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 1.7377215189873416, |
|
"grad_norm": 0.5711191881723127, |
|
"learning_rate": 3.292085501729909e-05, |
|
"loss": 1.1787, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 1.7417721518987341, |
|
"grad_norm": 0.7450416218884675, |
|
"learning_rate": 3.2877479077859534e-05, |
|
"loss": 1.1507, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.7458227848101266, |
|
"grad_norm": 0.5312116055737036, |
|
"learning_rate": 3.283399942497381e-05, |
|
"loss": 1.1447, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 1.749873417721519, |
|
"grad_norm": 0.7062451564323639, |
|
"learning_rate": 3.279041640882108e-05, |
|
"loss": 1.1712, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 1.7539240506329112, |
|
"grad_norm": 0.5006868724799162, |
|
"learning_rate": 3.2746730380412964e-05, |
|
"loss": 1.1254, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 1.7579746835443038, |
|
"grad_norm": 0.7479047420884823, |
|
"learning_rate": 3.2702941691590726e-05, |
|
"loss": 1.1599, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 1.7620253164556963, |
|
"grad_norm": 0.5282897190852283, |
|
"learning_rate": 3.265905069502244e-05, |
|
"loss": 1.1604, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 1.7660759493670886, |
|
"grad_norm": 0.6369812747161735, |
|
"learning_rate": 3.261505774420016e-05, |
|
"loss": 1.149, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 1.7701265822784809, |
|
"grad_norm": 0.6836631059765329, |
|
"learning_rate": 3.257096319343707e-05, |
|
"loss": 1.1753, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 1.7741772151898734, |
|
"grad_norm": 0.5624956993507891, |
|
"learning_rate": 3.2526767397864614e-05, |
|
"loss": 1.1592, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 1.7782278481012659, |
|
"grad_norm": 0.7420241041135692, |
|
"learning_rate": 3.248247071342966e-05, |
|
"loss": 1.1664, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 1.7822784810126582, |
|
"grad_norm": 0.6359882701967161, |
|
"learning_rate": 3.243807349689161e-05, |
|
"loss": 1.1645, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.7863291139240505, |
|
"grad_norm": 0.6263848697402968, |
|
"learning_rate": 3.2393576105819544e-05, |
|
"loss": 1.1379, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 1.790379746835443, |
|
"grad_norm": 0.6462332661644951, |
|
"learning_rate": 3.2348978898589333e-05, |
|
"loss": 1.1563, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 1.7944303797468355, |
|
"grad_norm": 0.6882735134203297, |
|
"learning_rate": 3.230428223438075e-05, |
|
"loss": 1.1428, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 1.7984810126582278, |
|
"grad_norm": 0.5077001134861462, |
|
"learning_rate": 3.225948647317459e-05, |
|
"loss": 1.1433, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 1.80253164556962, |
|
"grad_norm": 0.5920336399781427, |
|
"learning_rate": 3.2214591975749745e-05, |
|
"loss": 1.1651, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 1.8065822784810126, |
|
"grad_norm": 0.621209099050027, |
|
"learning_rate": 3.216959910368034e-05, |
|
"loss": 1.1525, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 1.8106329113924051, |
|
"grad_norm": 0.6013074242299851, |
|
"learning_rate": 3.212450821933277e-05, |
|
"loss": 1.1638, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 1.8146835443037974, |
|
"grad_norm": 0.5816963099391691, |
|
"learning_rate": 3.207931968586281e-05, |
|
"loss": 1.1399, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 1.81873417721519, |
|
"grad_norm": 0.6288816879723969, |
|
"learning_rate": 3.203403386721272e-05, |
|
"loss": 1.1232, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 1.8227848101265822, |
|
"grad_norm": 0.5444342787208661, |
|
"learning_rate": 3.1988651128108245e-05, |
|
"loss": 1.1444, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.8268354430379747, |
|
"grad_norm": 0.588101277466446, |
|
"learning_rate": 3.194317183405573e-05, |
|
"loss": 1.1695, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 1.8308860759493673, |
|
"grad_norm": 0.6699403701747185, |
|
"learning_rate": 3.189759635133914e-05, |
|
"loss": 1.1409, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 1.8349367088607595, |
|
"grad_norm": 0.659982479744482, |
|
"learning_rate": 3.185192504701718e-05, |
|
"loss": 1.1231, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 1.8389873417721518, |
|
"grad_norm": 0.666191927222366, |
|
"learning_rate": 3.1806158288920234e-05, |
|
"loss": 1.1802, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 1.8430379746835444, |
|
"grad_norm": 0.7567479723085003, |
|
"learning_rate": 3.1760296445647477e-05, |
|
"loss": 1.1685, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 1.8470886075949369, |
|
"grad_norm": 0.6970138336517401, |
|
"learning_rate": 3.1714339886563896e-05, |
|
"loss": 1.1439, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 1.8511392405063292, |
|
"grad_norm": 0.5754842225552638, |
|
"learning_rate": 3.166828898179731e-05, |
|
"loss": 1.1517, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 1.8551898734177215, |
|
"grad_norm": 0.6245371628955033, |
|
"learning_rate": 3.162214410223536e-05, |
|
"loss": 1.1626, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 1.859240506329114, |
|
"grad_norm": 0.6354123984954438, |
|
"learning_rate": 3.157590561952257e-05, |
|
"loss": 1.1273, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 1.8632911392405065, |
|
"grad_norm": 0.7466824319740564, |
|
"learning_rate": 3.152957390605732e-05, |
|
"loss": 1.1603, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.8673417721518988, |
|
"grad_norm": 0.5799160300815901, |
|
"learning_rate": 3.148314933498886e-05, |
|
"loss": 1.1554, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 1.871392405063291, |
|
"grad_norm": 0.6842509393087578, |
|
"learning_rate": 3.143663228021431e-05, |
|
"loss": 1.1367, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 1.8754430379746836, |
|
"grad_norm": 0.6103866231555901, |
|
"learning_rate": 3.1390023116375624e-05, |
|
"loss": 1.1601, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 1.879493670886076, |
|
"grad_norm": 0.6329168017790784, |
|
"learning_rate": 3.134332221885661e-05, |
|
"loss": 1.148, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 1.8835443037974684, |
|
"grad_norm": 0.5999221044258835, |
|
"learning_rate": 3.129652996377987e-05, |
|
"loss": 1.1717, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 1.8875949367088607, |
|
"grad_norm": 0.5724615094336478, |
|
"learning_rate": 3.12496467280038e-05, |
|
"loss": 1.1371, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 1.8916455696202532, |
|
"grad_norm": 0.7583895254452035, |
|
"learning_rate": 3.120267288911952e-05, |
|
"loss": 1.1445, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 1.8956962025316457, |
|
"grad_norm": 0.5973397429661461, |
|
"learning_rate": 3.11556088254479e-05, |
|
"loss": 1.1683, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 1.899746835443038, |
|
"grad_norm": 0.7630028389627302, |
|
"learning_rate": 3.11084549160364e-05, |
|
"loss": 1.1684, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 1.9037974683544303, |
|
"grad_norm": 0.6602007306957379, |
|
"learning_rate": 3.106121154065615e-05, |
|
"loss": 1.1219, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.9078481012658228, |
|
"grad_norm": 0.6983417866368752, |
|
"learning_rate": 3.1013879079798805e-05, |
|
"loss": 1.1854, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 1.9118987341772153, |
|
"grad_norm": 0.7174141219318854, |
|
"learning_rate": 3.096645791467348e-05, |
|
"loss": 1.1487, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 1.9159493670886076, |
|
"grad_norm": 0.6556679814504864, |
|
"learning_rate": 3.091894842720373e-05, |
|
"loss": 1.1607, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 0.6482308437040363, |
|
"learning_rate": 3.0871351000024425e-05, |
|
"loss": 1.174, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 1.9240506329113924, |
|
"grad_norm": 0.6474383812411197, |
|
"learning_rate": 3.0823666016478716e-05, |
|
"loss": 1.1505, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 1.928101265822785, |
|
"grad_norm": 0.5735527611864775, |
|
"learning_rate": 3.0775893860614896e-05, |
|
"loss": 1.1482, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 1.9321518987341773, |
|
"grad_norm": 0.6119866453799896, |
|
"learning_rate": 3.0728034917183336e-05, |
|
"loss": 1.1327, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 1.9362025316455695, |
|
"grad_norm": 0.6677082200262521, |
|
"learning_rate": 3.06800895716334e-05, |
|
"loss": 1.1693, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 1.940253164556962, |
|
"grad_norm": 0.6834439231658457, |
|
"learning_rate": 3.063205821011029e-05, |
|
"loss": 1.1565, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 1.9443037974683546, |
|
"grad_norm": 0.5452361050297119, |
|
"learning_rate": 3.0583941219452016e-05, |
|
"loss": 1.1684, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.9483544303797469, |
|
"grad_norm": 0.5856953069100633, |
|
"learning_rate": 3.053573898718618e-05, |
|
"loss": 1.1719, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 1.9524050632911392, |
|
"grad_norm": 0.5304289185360662, |
|
"learning_rate": 3.0487451901526956e-05, |
|
"loss": 1.1636, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 1.9564556962025317, |
|
"grad_norm": 0.5671630949874984, |
|
"learning_rate": 3.0439080351371875e-05, |
|
"loss": 1.152, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 1.9605063291139242, |
|
"grad_norm": 0.5281688026683441, |
|
"learning_rate": 3.0390624726298764e-05, |
|
"loss": 1.1645, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 1.9645569620253165, |
|
"grad_norm": 0.6178649432350114, |
|
"learning_rate": 3.034208541656255e-05, |
|
"loss": 1.1273, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 1.9686075949367088, |
|
"grad_norm": 0.48743991020474353, |
|
"learning_rate": 3.029346281309218e-05, |
|
"loss": 1.1654, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 1.9726582278481013, |
|
"grad_norm": 0.5103761656925145, |
|
"learning_rate": 3.0244757307487415e-05, |
|
"loss": 1.1507, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 1.9767088607594938, |
|
"grad_norm": 0.5600543832567956, |
|
"learning_rate": 3.019596929201569e-05, |
|
"loss": 1.1385, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 1.980759493670886, |
|
"grad_norm": 0.4784001145700006, |
|
"learning_rate": 3.0147099159608985e-05, |
|
"loss": 1.131, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 1.9848101265822784, |
|
"grad_norm": 0.5907135370866342, |
|
"learning_rate": 3.0098147303860616e-05, |
|
"loss": 1.1654, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.988860759493671, |
|
"grad_norm": 0.5067774231546158, |
|
"learning_rate": 3.0049114119022117e-05, |
|
"loss": 1.1562, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 1.9929113924050634, |
|
"grad_norm": 0.5385332084918243, |
|
"learning_rate": 3.0000000000000004e-05, |
|
"loss": 1.1582, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 1.9969620253164557, |
|
"grad_norm": 0.5354799323614053, |
|
"learning_rate": 2.995080534235264e-05, |
|
"loss": 1.1476, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 2.001012658227848, |
|
"grad_norm": 0.5801635181454197, |
|
"learning_rate": 2.9901530542287044e-05, |
|
"loss": 1.1345, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 2.0050632911392405, |
|
"grad_norm": 0.6895035224297531, |
|
"learning_rate": 2.9852175996655676e-05, |
|
"loss": 1.0331, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 2.009113924050633, |
|
"grad_norm": 1.2092320313673883, |
|
"learning_rate": 2.980274210295326e-05, |
|
"loss": 1.0262, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 2.013164556962025, |
|
"grad_norm": 0.7835402868546538, |
|
"learning_rate": 2.9753229259313578e-05, |
|
"loss": 1.0096, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 2.0172151898734176, |
|
"grad_norm": 0.7848931048544997, |
|
"learning_rate": 2.9703637864506274e-05, |
|
"loss": 1.0536, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 2.02126582278481, |
|
"grad_norm": 0.7003899744892192, |
|
"learning_rate": 2.965396831793362e-05, |
|
"loss": 1.0021, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 2.0253164556962027, |
|
"grad_norm": 0.8693436740092586, |
|
"learning_rate": 2.9604221019627316e-05, |
|
"loss": 0.9979, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 2.0293670886075947, |
|
"grad_norm": 0.9280926834223986, |
|
"learning_rate": 2.955439637024526e-05, |
|
"loss": 0.9974, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 2.0334177215189873, |
|
"grad_norm": 0.6884498997135877, |
|
"learning_rate": 2.9504494771068334e-05, |
|
"loss": 1.0316, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 2.0374683544303798, |
|
"grad_norm": 0.7228154705924649, |
|
"learning_rate": 2.9454516623997156e-05, |
|
"loss": 1.0244, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 2.0415189873417723, |
|
"grad_norm": 0.9893516794468438, |
|
"learning_rate": 2.9404462331548847e-05, |
|
"loss": 0.9767, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 2.0455696202531644, |
|
"grad_norm": 0.6143624021765863, |
|
"learning_rate": 2.93543322968538e-05, |
|
"loss": 0.9979, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 2.049620253164557, |
|
"grad_norm": 0.849996589920869, |
|
"learning_rate": 2.9304126923652428e-05, |
|
"loss": 1.0144, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 2.0536708860759494, |
|
"grad_norm": 0.661618423664458, |
|
"learning_rate": 2.9253846616291896e-05, |
|
"loss": 1.0203, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 2.057721518987342, |
|
"grad_norm": 0.6679045817431669, |
|
"learning_rate": 2.9203491779722896e-05, |
|
"loss": 1.01, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 2.061772151898734, |
|
"grad_norm": 0.6661145201550285, |
|
"learning_rate": 2.9153062819496357e-05, |
|
"loss": 1.0222, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 2.0658227848101265, |
|
"grad_norm": 0.6387559488801365, |
|
"learning_rate": 2.9102560141760178e-05, |
|
"loss": 1.0156, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 2.069873417721519, |
|
"grad_norm": 0.6235425032168542, |
|
"learning_rate": 2.9051984153256004e-05, |
|
"loss": 1.0281, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 2.0739240506329115, |
|
"grad_norm": 0.6838910243587044, |
|
"learning_rate": 2.900133526131588e-05, |
|
"loss": 1.0026, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 2.0779746835443036, |
|
"grad_norm": 0.5397016077246916, |
|
"learning_rate": 2.8950613873859025e-05, |
|
"loss": 1.0188, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 2.082025316455696, |
|
"grad_norm": 0.696784129644162, |
|
"learning_rate": 2.8899820399388515e-05, |
|
"loss": 1.0004, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 2.0860759493670886, |
|
"grad_norm": 0.7006685341361082, |
|
"learning_rate": 2.8848955246988012e-05, |
|
"loss": 1.0083, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 2.090126582278481, |
|
"grad_norm": 0.5936753928696896, |
|
"learning_rate": 2.879801882631847e-05, |
|
"loss": 1.0294, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 2.094177215189873, |
|
"grad_norm": 0.7012854125074445, |
|
"learning_rate": 2.8747011547614808e-05, |
|
"loss": 1.0092, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 2.0982278481012657, |
|
"grad_norm": 0.6033585773045091, |
|
"learning_rate": 2.8695933821682635e-05, |
|
"loss": 1.0197, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 2.1022784810126582, |
|
"grad_norm": 0.6766770211171466, |
|
"learning_rate": 2.864478605989494e-05, |
|
"loss": 1.0186, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 2.1063291139240508, |
|
"grad_norm": 0.6648314771848939, |
|
"learning_rate": 2.8593568674188765e-05, |
|
"loss": 1.0343, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 2.110379746835443, |
|
"grad_norm": 0.6719551472239573, |
|
"learning_rate": 2.8542282077061892e-05, |
|
"loss": 1.0088, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 2.1144303797468353, |
|
"grad_norm": 0.6003514076267086, |
|
"learning_rate": 2.8490926681569523e-05, |
|
"loss": 1.0144, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 2.118481012658228, |
|
"grad_norm": 0.6670270325307559, |
|
"learning_rate": 2.8439502901320956e-05, |
|
"loss": 1.0294, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 2.1225316455696204, |
|
"grad_norm": 0.6078921085380056, |
|
"learning_rate": 2.8388011150476237e-05, |
|
"loss": 0.992, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 2.1265822784810124, |
|
"grad_norm": 0.693062430169994, |
|
"learning_rate": 2.8336451843742866e-05, |
|
"loss": 1.0206, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 2.130632911392405, |
|
"grad_norm": 0.6481387873992199, |
|
"learning_rate": 2.8284825396372387e-05, |
|
"loss": 1.0401, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 2.1346835443037975, |
|
"grad_norm": 0.5641535422210479, |
|
"learning_rate": 2.8233132224157132e-05, |
|
"loss": 1.0356, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 2.13873417721519, |
|
"grad_norm": 0.5311822158331811, |
|
"learning_rate": 2.8181372743426805e-05, |
|
"loss": 1.0133, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 2.1427848101265825, |
|
"grad_norm": 0.6313724261130622, |
|
"learning_rate": 2.8129547371045128e-05, |
|
"loss": 1.0211, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 2.1468354430379746, |
|
"grad_norm": 0.5755198026265722, |
|
"learning_rate": 2.8077656524406534e-05, |
|
"loss": 1.0138, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 2.150886075949367, |
|
"grad_norm": 0.6343596980550483, |
|
"learning_rate": 2.802570062143278e-05, |
|
"loss": 0.9967, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 2.1549367088607596, |
|
"grad_norm": 0.6677506920589293, |
|
"learning_rate": 2.7973680080569555e-05, |
|
"loss": 1.0225, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 2.1589873417721517, |
|
"grad_norm": 0.5670298710242881, |
|
"learning_rate": 2.792159532078314e-05, |
|
"loss": 1.0073, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 2.163037974683544, |
|
"grad_norm": 0.6701666402693496, |
|
"learning_rate": 2.7869446761557033e-05, |
|
"loss": 1.0165, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 2.1670886075949367, |
|
"grad_norm": 0.5840554230350358, |
|
"learning_rate": 2.781723482288857e-05, |
|
"loss": 1.0297, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 2.1711392405063292, |
|
"grad_norm": 0.5462457155921613, |
|
"learning_rate": 2.7764959925285517e-05, |
|
"loss": 1.0158, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 2.1751898734177217, |
|
"grad_norm": 0.5948879552367591, |
|
"learning_rate": 2.771262248976272e-05, |
|
"loss": 1.0102, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 2.179240506329114, |
|
"grad_norm": 0.5709513962588751, |
|
"learning_rate": 2.7660222937838677e-05, |
|
"loss": 1.0146, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 2.1832911392405063, |
|
"grad_norm": 0.7105521352683684, |
|
"learning_rate": 2.7607761691532186e-05, |
|
"loss": 1.005, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 2.187341772151899, |
|
"grad_norm": 0.5288298496430593, |
|
"learning_rate": 2.7555239173358916e-05, |
|
"loss": 1.0039, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 2.191392405063291, |
|
"grad_norm": 0.6066533775739462, |
|
"learning_rate": 2.7502655806328e-05, |
|
"loss": 1.0463, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 2.1954430379746834, |
|
"grad_norm": 0.4870070750405779, |
|
"learning_rate": 2.7450012013938648e-05, |
|
"loss": 1.0132, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 2.199493670886076, |
|
"grad_norm": 0.5738040560093357, |
|
"learning_rate": 2.739730822017673e-05, |
|
"loss": 1.0178, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 2.2035443037974685, |
|
"grad_norm": 0.5197347689950114, |
|
"learning_rate": 2.7344544849511355e-05, |
|
"loss": 0.9993, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 2.207594936708861, |
|
"grad_norm": 0.5038843717132108, |
|
"learning_rate": 2.7291722326891456e-05, |
|
"loss": 1.0041, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 2.211645569620253, |
|
"grad_norm": 0.66549808145693, |
|
"learning_rate": 2.723884107774236e-05, |
|
"loss": 1.0315, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 2.2156962025316456, |
|
"grad_norm": 0.4871339346103557, |
|
"learning_rate": 2.718590152796239e-05, |
|
"loss": 0.9958, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 2.219746835443038, |
|
"grad_norm": 0.6139299428669691, |
|
"learning_rate": 2.71329041039194e-05, |
|
"loss": 1.0172, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 2.22379746835443, |
|
"grad_norm": 0.5637856188863154, |
|
"learning_rate": 2.7079849232447357e-05, |
|
"loss": 1.0017, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 2.2278481012658227, |
|
"grad_norm": 0.5383416212276905, |
|
"learning_rate": 2.7026737340842895e-05, |
|
"loss": 1.0149, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 2.231898734177215, |
|
"grad_norm": 0.5595330744870007, |
|
"learning_rate": 2.697356885686189e-05, |
|
"loss": 1.0218, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 2.2359493670886077, |
|
"grad_norm": 0.589950590347505, |
|
"learning_rate": 2.6920344208716014e-05, |
|
"loss": 1.0334, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 0.5715264406692745, |
|
"learning_rate": 2.6867063825069252e-05, |
|
"loss": 1.0204, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 2.2440506329113923, |
|
"grad_norm": 0.7535338185706086, |
|
"learning_rate": 2.6813728135034494e-05, |
|
"loss": 1.0272, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 2.248101265822785, |
|
"grad_norm": 0.5379369784813346, |
|
"learning_rate": 2.6760337568170056e-05, |
|
"loss": 0.9897, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 2.2521518987341773, |
|
"grad_norm": 0.7116159618042617, |
|
"learning_rate": 2.6706892554476226e-05, |
|
"loss": 1.0193, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 2.2562025316455694, |
|
"grad_norm": 0.4679924378502827, |
|
"learning_rate": 2.6653393524391795e-05, |
|
"loss": 1.0257, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 2.260253164556962, |
|
"grad_norm": 0.5637566920004994, |
|
"learning_rate": 2.6599840908790592e-05, |
|
"loss": 1.0109, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 2.2643037974683544, |
|
"grad_norm": 0.4485705339309698, |
|
"learning_rate": 2.6546235138978028e-05, |
|
"loss": 1.0348, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 2.268354430379747, |
|
"grad_norm": 0.5334499230558688, |
|
"learning_rate": 2.6492576646687597e-05, |
|
"loss": 1.0228, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 2.2724050632911394, |
|
"grad_norm": 0.5260005703080777, |
|
"learning_rate": 2.6438865864077425e-05, |
|
"loss": 1.0199, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 2.2764556962025315, |
|
"grad_norm": 0.520172274194265, |
|
"learning_rate": 2.6385103223726766e-05, |
|
"loss": 1.0082, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 2.280506329113924, |
|
"grad_norm": 0.5094704901558924, |
|
"learning_rate": 2.6331289158632537e-05, |
|
"loss": 1.0314, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 2.2845569620253166, |
|
"grad_norm": 0.5936032272916024, |
|
"learning_rate": 2.6277424102205817e-05, |
|
"loss": 1.0369, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 2.2886075949367086, |
|
"grad_norm": 0.5108988909558086, |
|
"learning_rate": 2.6223508488268374e-05, |
|
"loss": 1.0191, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 2.292658227848101, |
|
"grad_norm": 0.6587729506277713, |
|
"learning_rate": 2.6169542751049148e-05, |
|
"loss": 1.0309, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 2.2967088607594937, |
|
"grad_norm": 0.5440398970206946, |
|
"learning_rate": 2.6115527325180754e-05, |
|
"loss": 1.024, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 2.300759493670886, |
|
"grad_norm": 0.5578029007796318, |
|
"learning_rate": 2.606146264569603e-05, |
|
"loss": 1.0219, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 2.3048101265822787, |
|
"grad_norm": 0.6903894779635844, |
|
"learning_rate": 2.6007349148024447e-05, |
|
"loss": 1.0097, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 2.3088607594936708, |
|
"grad_norm": 0.5415806936625543, |
|
"learning_rate": 2.5953187267988694e-05, |
|
"loss": 1.0181, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 2.3129113924050633, |
|
"grad_norm": 0.5242665184382573, |
|
"learning_rate": 2.5898977441801097e-05, |
|
"loss": 1.0231, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 2.316962025316456, |
|
"grad_norm": 0.5590023235404082, |
|
"learning_rate": 2.584472010606015e-05, |
|
"loss": 1.021, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 2.321012658227848, |
|
"grad_norm": 0.5392259827932185, |
|
"learning_rate": 2.5790415697746976e-05, |
|
"loss": 1.0194, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 2.3250632911392404, |
|
"grad_norm": 0.5975528088229662, |
|
"learning_rate": 2.5736064654221808e-05, |
|
"loss": 1.0297, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 2.329113924050633, |
|
"grad_norm": 0.5079216009835216, |
|
"learning_rate": 2.568166741322048e-05, |
|
"loss": 1.0346, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 2.3331645569620254, |
|
"grad_norm": 0.5219122239463513, |
|
"learning_rate": 2.56272244128509e-05, |
|
"loss": 1.0632, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 2.337215189873418, |
|
"grad_norm": 0.5139185306280853, |
|
"learning_rate": 2.55727360915895e-05, |
|
"loss": 1.0164, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 2.34126582278481, |
|
"grad_norm": 0.49193173960610537, |
|
"learning_rate": 2.5518202888277734e-05, |
|
"loss": 1.0296, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 2.3453164556962025, |
|
"grad_norm": 0.46691870069933034, |
|
"learning_rate": 2.5463625242118523e-05, |
|
"loss": 1.0319, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 2.349367088607595, |
|
"grad_norm": 0.5523887632253133, |
|
"learning_rate": 2.5409003592672723e-05, |
|
"loss": 0.9987, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 2.353417721518987, |
|
"grad_norm": 0.516921810911513, |
|
"learning_rate": 2.535433837985559e-05, |
|
"loss": 1.0275, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 2.3574683544303796, |
|
"grad_norm": 0.5900062953091885, |
|
"learning_rate": 2.529963004393324e-05, |
|
"loss": 1.0417, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 2.361518987341772, |
|
"grad_norm": 0.5612954061813461, |
|
"learning_rate": 2.524487902551908e-05, |
|
"loss": 1.0299, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 2.3655696202531646, |
|
"grad_norm": 0.5381203488988708, |
|
"learning_rate": 2.519008576557029e-05, |
|
"loss": 1.0352, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 2.369620253164557, |
|
"grad_norm": 0.5333543476695336, |
|
"learning_rate": 2.5135250705384254e-05, |
|
"loss": 1.0333, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 2.3736708860759492, |
|
"grad_norm": 0.5273404908783355, |
|
"learning_rate": 2.5080374286595007e-05, |
|
"loss": 1.0294, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 2.3777215189873417, |
|
"grad_norm": 0.5245383463434636, |
|
"learning_rate": 2.5025456951169677e-05, |
|
"loss": 1.0293, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 2.3817721518987343, |
|
"grad_norm": 0.554681220598988, |
|
"learning_rate": 2.4970499141404942e-05, |
|
"loss": 1.0303, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 2.3858227848101268, |
|
"grad_norm": 0.5036996184253952, |
|
"learning_rate": 2.491550129992345e-05, |
|
"loss": 1.0401, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 2.389873417721519, |
|
"grad_norm": 0.5229637027529087, |
|
"learning_rate": 2.486046386967024e-05, |
|
"loss": 1.0207, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 2.3939240506329114, |
|
"grad_norm": 0.491384332679668, |
|
"learning_rate": 2.4805387293909214e-05, |
|
"loss": 1.0406, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 2.397974683544304, |
|
"grad_norm": 0.5255224163281365, |
|
"learning_rate": 2.4750272016219552e-05, |
|
"loss": 1.0229, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 2.4020253164556964, |
|
"grad_norm": 0.5617986927899088, |
|
"learning_rate": 2.4695118480492114e-05, |
|
"loss": 1.0617, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 2.4060759493670885, |
|
"grad_norm": 0.4920215239649119, |
|
"learning_rate": 2.4639927130925898e-05, |
|
"loss": 1.046, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 2.410126582278481, |
|
"grad_norm": 0.49492193986034355, |
|
"learning_rate": 2.458469841202444e-05, |
|
"loss": 1.014, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 2.4141772151898735, |
|
"grad_norm": 0.5508439469870827, |
|
"learning_rate": 2.452943276859226e-05, |
|
"loss": 1.0333, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 2.418227848101266, |
|
"grad_norm": 0.44875249990939386, |
|
"learning_rate": 2.447413064573125e-05, |
|
"loss": 1.0272, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 2.422278481012658, |
|
"grad_norm": 0.5517701170144657, |
|
"learning_rate": 2.4418792488837095e-05, |
|
"loss": 1.0136, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 2.4263291139240506, |
|
"grad_norm": 0.4756375671739574, |
|
"learning_rate": 2.4363418743595713e-05, |
|
"loss": 1.0344, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 2.430379746835443, |
|
"grad_norm": 0.6074358494848272, |
|
"learning_rate": 2.430800985597963e-05, |
|
"loss": 1.0136, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 2.4344303797468356, |
|
"grad_norm": 0.45848388393615697, |
|
"learning_rate": 2.4252566272244415e-05, |
|
"loss": 0.9962, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 2.4384810126582277, |
|
"grad_norm": 0.5578498095919927, |
|
"learning_rate": 2.4197088438925063e-05, |
|
"loss": 1.0033, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 2.44253164556962, |
|
"grad_norm": 0.447137712656289, |
|
"learning_rate": 2.4141576802832417e-05, |
|
"loss": 1.0328, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 2.4465822784810127, |
|
"grad_norm": 0.49673531889142347, |
|
"learning_rate": 2.408603181104957e-05, |
|
"loss": 1.0188, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 2.4506329113924052, |
|
"grad_norm": 0.48139903004180523, |
|
"learning_rate": 2.4030453910928245e-05, |
|
"loss": 1.0293, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 2.4546835443037973, |
|
"grad_norm": 0.46525581296497814, |
|
"learning_rate": 2.397484355008521e-05, |
|
"loss": 1.0048, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 2.45873417721519, |
|
"grad_norm": 0.5324924178702928, |
|
"learning_rate": 2.3919201176398662e-05, |
|
"loss": 1.0101, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 2.4627848101265823, |
|
"grad_norm": 0.49832008960752955, |
|
"learning_rate": 2.3863527238004633e-05, |
|
"loss": 1.0176, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 2.466835443037975, |
|
"grad_norm": 0.47135857514656343, |
|
"learning_rate": 2.380782218329337e-05, |
|
"loss": 1.0128, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 2.470886075949367, |
|
"grad_norm": 0.48960084845830415, |
|
"learning_rate": 2.3752086460905725e-05, |
|
"loss": 1.0605, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 2.4749367088607594, |
|
"grad_norm": 0.4981175924190684, |
|
"learning_rate": 2.3696320519729544e-05, |
|
"loss": 1.0373, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 2.478987341772152, |
|
"grad_norm": 0.6048582727269858, |
|
"learning_rate": 2.3640524808896045e-05, |
|
"loss": 1.044, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 2.4830379746835445, |
|
"grad_norm": 0.5078286734838149, |
|
"learning_rate": 2.3584699777776222e-05, |
|
"loss": 1.0313, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 2.4870886075949366, |
|
"grad_norm": 0.5191819139070797, |
|
"learning_rate": 2.3528845875977195e-05, |
|
"loss": 1.0536, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 2.491139240506329, |
|
"grad_norm": 0.5507895321263605, |
|
"learning_rate": 2.3472963553338614e-05, |
|
"loss": 1.0121, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 2.4951898734177216, |
|
"grad_norm": 0.5035978041036229, |
|
"learning_rate": 2.341705325992901e-05, |
|
"loss": 1.0136, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 2.499240506329114, |
|
"grad_norm": 0.5679857665814879, |
|
"learning_rate": 2.336111544604222e-05, |
|
"loss": 1.0064, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 2.503291139240506, |
|
"grad_norm": 0.5503633480644989, |
|
"learning_rate": 2.33051505621937e-05, |
|
"loss": 1.0353, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 2.5073417721518987, |
|
"grad_norm": 0.5375922050503924, |
|
"learning_rate": 2.324915905911693e-05, |
|
"loss": 1.0052, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 2.511392405063291, |
|
"grad_norm": 0.7486033469161537, |
|
"learning_rate": 2.319314138775977e-05, |
|
"loss": 1.0206, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 2.5154430379746833, |
|
"grad_norm": 0.5257536054943663, |
|
"learning_rate": 2.3137097999280856e-05, |
|
"loss": 1.0254, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 2.519493670886076, |
|
"grad_norm": 0.5730511276583133, |
|
"learning_rate": 2.308102934504593e-05, |
|
"loss": 1.0299, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 2.5235443037974683, |
|
"grad_norm": 0.5491278780233214, |
|
"learning_rate": 2.3024935876624222e-05, |
|
"loss": 1.0185, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 2.527594936708861, |
|
"grad_norm": 0.5462466952331015, |
|
"learning_rate": 2.2968818045784813e-05, |
|
"loss": 1.0012, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 2.5316455696202533, |
|
"grad_norm": 0.5429754616549164, |
|
"learning_rate": 2.2912676304493006e-05, |
|
"loss": 1.0038, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 2.5356962025316454, |
|
"grad_norm": 0.6026974029153855, |
|
"learning_rate": 2.2856511104906668e-05, |
|
"loss": 1.0182, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 2.539746835443038, |
|
"grad_norm": 0.5313924945353171, |
|
"learning_rate": 2.2800322899372586e-05, |
|
"loss": 1.0375, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 2.5437974683544304, |
|
"grad_norm": 0.5429977017183559, |
|
"learning_rate": 2.2744112140422844e-05, |
|
"loss": 1.0266, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 2.547848101265823, |
|
"grad_norm": 0.5524829264994087, |
|
"learning_rate": 2.2687879280771177e-05, |
|
"loss": 1.0266, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 2.5518987341772155, |
|
"grad_norm": 0.5264496946029424, |
|
"learning_rate": 2.26316247733093e-05, |
|
"loss": 1.0141, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 2.5559493670886075, |
|
"grad_norm": 0.5143315844952089, |
|
"learning_rate": 2.257534907110328e-05, |
|
"loss": 1.0154, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 0.49100271539581425, |
|
"learning_rate": 2.2519052627389882e-05, |
|
"loss": 1.0156, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 2.5640506329113926, |
|
"grad_norm": 0.5371490440713882, |
|
"learning_rate": 2.246273589557294e-05, |
|
"loss": 0.9955, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 2.5681012658227846, |
|
"grad_norm": 0.511997600605205, |
|
"learning_rate": 2.240639932921966e-05, |
|
"loss": 1.0402, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 2.572151898734177, |
|
"grad_norm": 0.5618922745501259, |
|
"learning_rate": 2.2350043382056995e-05, |
|
"loss": 1.0064, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 2.5762025316455697, |
|
"grad_norm": 0.5574722139404477, |
|
"learning_rate": 2.2293668507968015e-05, |
|
"loss": 1.025, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 2.580253164556962, |
|
"grad_norm": 0.5882415460148407, |
|
"learning_rate": 2.2237275160988186e-05, |
|
"loss": 1.0159, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 2.5843037974683547, |
|
"grad_norm": 0.49081413260467366, |
|
"learning_rate": 2.2180863795301787e-05, |
|
"loss": 1.0296, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 2.5883544303797468, |
|
"grad_norm": 0.5838467723866013, |
|
"learning_rate": 2.212443486523819e-05, |
|
"loss": 1.0728, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 2.5924050632911393, |
|
"grad_norm": 0.6170912260429801, |
|
"learning_rate": 2.2067988825268243e-05, |
|
"loss": 1.013, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 2.596455696202532, |
|
"grad_norm": 0.47248179185113337, |
|
"learning_rate": 2.2011526130000596e-05, |
|
"loss": 1.0317, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 2.600506329113924, |
|
"grad_norm": 0.5681806819560992, |
|
"learning_rate": 2.1955047234178038e-05, |
|
"loss": 1.0247, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 2.6045569620253164, |
|
"grad_norm": 0.463708228533855, |
|
"learning_rate": 2.1898552592673825e-05, |
|
"loss": 1.018, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 2.608607594936709, |
|
"grad_norm": 0.5325929881207099, |
|
"learning_rate": 2.184204266048803e-05, |
|
"loss": 1.0404, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 2.6126582278481014, |
|
"grad_norm": 0.490401810929384, |
|
"learning_rate": 2.1785517892743887e-05, |
|
"loss": 1.0289, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 2.616708860759494, |
|
"grad_norm": 0.4573573157201373, |
|
"learning_rate": 2.17289787446841e-05, |
|
"loss": 1.0195, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 2.620759493670886, |
|
"grad_norm": 0.4861001409780556, |
|
"learning_rate": 2.1672425671667198e-05, |
|
"loss": 1.0064, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 2.6248101265822785, |
|
"grad_norm": 0.4728049689992945, |
|
"learning_rate": 2.161585912916385e-05, |
|
"loss": 1.0197, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 2.628860759493671, |
|
"grad_norm": 0.4670732151790282, |
|
"learning_rate": 2.1559279572753214e-05, |
|
"loss": 1.0012, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 2.632911392405063, |
|
"grad_norm": 0.44750593282068835, |
|
"learning_rate": 2.1502687458119268e-05, |
|
"loss": 1.0091, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 2.6369620253164556, |
|
"grad_norm": 0.540672614135796, |
|
"learning_rate": 2.1446083241047116e-05, |
|
"loss": 1.0211, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 2.641012658227848, |
|
"grad_norm": 0.47553678903831514, |
|
"learning_rate": 2.1389467377419333e-05, |
|
"loss": 1.0314, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 2.6450632911392407, |
|
"grad_norm": 0.4687879062052804, |
|
"learning_rate": 2.133284032321232e-05, |
|
"loss": 1.015, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 2.649113924050633, |
|
"grad_norm": 0.4761124488619028, |
|
"learning_rate": 2.1276202534492566e-05, |
|
"loss": 1.0146, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 2.6531645569620252, |
|
"grad_norm": 0.4877113503656656, |
|
"learning_rate": 2.121955446741306e-05, |
|
"loss": 1.034, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 2.6572151898734178, |
|
"grad_norm": 0.4630754894238843, |
|
"learning_rate": 2.1162896578209517e-05, |
|
"loss": 1.0341, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 2.6612658227848103, |
|
"grad_norm": 0.45848156601819184, |
|
"learning_rate": 2.1106229323196813e-05, |
|
"loss": 1.0341, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 2.6653164556962023, |
|
"grad_norm": 0.4966310123748713, |
|
"learning_rate": 2.1049553158765214e-05, |
|
"loss": 1.0003, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 2.669367088607595, |
|
"grad_norm": 0.4554447671259762, |
|
"learning_rate": 2.0992868541376764e-05, |
|
"loss": 1.0126, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 2.6734177215189874, |
|
"grad_norm": 0.46875537996124655, |
|
"learning_rate": 2.093617592756158e-05, |
|
"loss": 1.0067, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 2.67746835443038, |
|
"grad_norm": 0.4976944808717196, |
|
"learning_rate": 2.0879475773914167e-05, |
|
"loss": 1.0096, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 2.6815189873417724, |
|
"grad_norm": 0.5446925688077884, |
|
"learning_rate": 2.082276853708978e-05, |
|
"loss": 1.0207, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 2.6855696202531645, |
|
"grad_norm": 0.5008286427817888, |
|
"learning_rate": 2.076605467380071e-05, |
|
"loss": 1.0227, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 2.689620253164557, |
|
"grad_norm": 0.5332582571428496, |
|
"learning_rate": 2.0709334640812613e-05, |
|
"loss": 1.037, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 2.6936708860759495, |
|
"grad_norm": 0.5416691768983506, |
|
"learning_rate": 2.0652608894940824e-05, |
|
"loss": 1.0335, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 2.6977215189873416, |
|
"grad_norm": 0.5041289288935831, |
|
"learning_rate": 2.0595877893046722e-05, |
|
"loss": 1.0402, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 2.701772151898734, |
|
"grad_norm": 0.5307269797916867, |
|
"learning_rate": 2.0539142092033985e-05, |
|
"loss": 1.0244, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 2.7058227848101266, |
|
"grad_norm": 0.5026127996377834, |
|
"learning_rate": 2.048240194884496e-05, |
|
"loss": 0.9953, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 2.709873417721519, |
|
"grad_norm": 0.4553480580546981, |
|
"learning_rate": 2.042565792045695e-05, |
|
"loss": 1.0152, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 2.7139240506329116, |
|
"grad_norm": 0.5097429721013352, |
|
"learning_rate": 2.036891046387857e-05, |
|
"loss": 1.0411, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 2.7179746835443037, |
|
"grad_norm": 0.49531667952964054, |
|
"learning_rate": 2.0312160036146036e-05, |
|
"loss": 1.0445, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 2.7220253164556962, |
|
"grad_norm": 0.42789543896033205, |
|
"learning_rate": 2.025540709431948e-05, |
|
"loss": 1.0534, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 2.7260759493670887, |
|
"grad_norm": 0.47993803280822295, |
|
"learning_rate": 2.0198652095479298e-05, |
|
"loss": 1.0255, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 2.730126582278481, |
|
"grad_norm": 0.5429741328275045, |
|
"learning_rate": 2.014189549672245e-05, |
|
"loss": 1.0378, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 2.7341772151898733, |
|
"grad_norm": 0.45009794669001796, |
|
"learning_rate": 2.0085137755158776e-05, |
|
"loss": 1.0016, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 2.738227848101266, |
|
"grad_norm": 0.4671981915242967, |
|
"learning_rate": 2.0028379327907327e-05, |
|
"loss": 1.0002, |
|
"step": 676 |
|
    },
    { "epoch": 2.7422784810126584, "grad_norm": 0.5252792905726302, "learning_rate": 1.9971620672092676e-05, "loss": 1.0305, "step": 677 },
    { "epoch": 2.746329113924051, "grad_norm": 0.47379738750543554, "learning_rate": 1.991486224484123e-05, "loss": 1.0186, "step": 678 },
    { "epoch": 2.750379746835443, "grad_norm": 0.49891873613816495, "learning_rate": 1.985810450327756e-05, "loss": 1.0256, "step": 679 },
    { "epoch": 2.7544303797468355, "grad_norm": 0.4907339228346661, "learning_rate": 1.9801347904520706e-05, "loss": 1.0284, "step": 680 },
    { "epoch": 2.758481012658228, "grad_norm": 0.4491712581937517, "learning_rate": 1.974459290568053e-05, "loss": 1.0277, "step": 681 },
    { "epoch": 2.76253164556962, "grad_norm": 0.5146975000104856, "learning_rate": 1.968783996385397e-05, "loss": 1.0364, "step": 682 },
    { "epoch": 2.7665822784810126, "grad_norm": 0.5481977786317666, "learning_rate": 1.963108953612143e-05, "loss": 0.9977, "step": 683 },
    { "epoch": 2.770632911392405, "grad_norm": 0.4144066106189655, "learning_rate": 1.9574342079543056e-05, "loss": 1.0169, "step": 684 },
    { "epoch": 2.7746835443037976, "grad_norm": 0.5897906663111407, "learning_rate": 1.9517598051155046e-05, "loss": 1.0601, "step": 685 },
    { "epoch": 2.77873417721519, "grad_norm": 0.45683625437849484, "learning_rate": 1.9460857907966025e-05, "loss": 1.0058, "step": 686 },
    { "epoch": 2.782784810126582, "grad_norm": 0.5445950406714416, "learning_rate": 1.9404122106953285e-05, "loss": 1.0154, "step": 687 },
    { "epoch": 2.7868354430379747, "grad_norm": 0.5119743541531611, "learning_rate": 1.9347391105059176e-05, "loss": 1.0419, "step": 688 },
    { "epoch": 2.790886075949367, "grad_norm": 0.5246144317621387, "learning_rate": 1.92906653591874e-05, "loss": 1.0163, "step": 689 },
    { "epoch": 2.7949367088607593, "grad_norm": 0.43377413208229887, "learning_rate": 1.9233945326199295e-05, "loss": 1.0062, "step": 690 },
    { "epoch": 2.798987341772152, "grad_norm": 0.5331754347259767, "learning_rate": 1.917723146291022e-05, "loss": 1.0135, "step": 691 },
    { "epoch": 2.8030379746835443, "grad_norm": 0.47589680989101923, "learning_rate": 1.912052422608584e-05, "loss": 1.0201, "step": 692 },
    { "epoch": 2.807088607594937, "grad_norm": 0.5517993141730592, "learning_rate": 1.9063824072438428e-05, "loss": 1.0372, "step": 693 },
    { "epoch": 2.8111392405063294, "grad_norm": 0.47765324629891404, "learning_rate": 1.9007131458623246e-05, "loss": 1.0269, "step": 694 },
    { "epoch": 2.8151898734177214, "grad_norm": 0.5416044897936797, "learning_rate": 1.895044684123479e-05, "loss": 1.016, "step": 695 },
    { "epoch": 2.819240506329114, "grad_norm": 0.46436913098439336, "learning_rate": 1.8893770676803194e-05, "loss": 1.0334, "step": 696 },
    { "epoch": 2.8232911392405065, "grad_norm": 0.49974978237854684, "learning_rate": 1.8837103421790486e-05, "loss": 1.0144, "step": 697 },
    { "epoch": 2.8273417721518985, "grad_norm": 0.45433943286804207, "learning_rate": 1.8780445532586952e-05, "loss": 1.0318, "step": 698 },
    { "epoch": 2.831392405063291, "grad_norm": 0.4695881473857097, "learning_rate": 1.872379746550743e-05, "loss": 1.0252, "step": 699 },
    { "epoch": 2.8354430379746836, "grad_norm": 0.42926994962072956, "learning_rate": 1.866715967678769e-05, "loss": 1.016, "step": 700 },
    { "epoch": 2.839493670886076, "grad_norm": 0.4643111773017718, "learning_rate": 1.861053262258067e-05, "loss": 1.0189, "step": 701 },
    { "epoch": 2.8435443037974686, "grad_norm": 0.4518100051036514, "learning_rate": 1.8553916758952897e-05, "loss": 1.0263, "step": 702 },
    { "epoch": 2.8475949367088607, "grad_norm": 0.4437576751881414, "learning_rate": 1.8497312541880735e-05, "loss": 1.0072, "step": 703 },
    { "epoch": 2.851645569620253, "grad_norm": 0.45800741999273964, "learning_rate": 1.8440720427246786e-05, "loss": 1.0207, "step": 704 },
    { "epoch": 2.8556962025316457, "grad_norm": 0.48620023401260565, "learning_rate": 1.8384140870836157e-05, "loss": 1.0012, "step": 705 },
    { "epoch": 2.8597468354430378, "grad_norm": 0.4582693383521256, "learning_rate": 1.8327574328332806e-05, "loss": 1.0301, "step": 706 },
    { "epoch": 2.8637974683544303, "grad_norm": 0.46997718822847556, "learning_rate": 1.8271021255315906e-05, "loss": 1.0104, "step": 707 },
    { "epoch": 2.867848101265823, "grad_norm": 0.46927038684222133, "learning_rate": 1.8214482107256117e-05, "loss": 1.0108, "step": 708 },
    { "epoch": 2.8718987341772153, "grad_norm": 0.5041285787918498, "learning_rate": 1.8157957339511968e-05, "loss": 1.0078, "step": 709 },
    { "epoch": 2.875949367088608, "grad_norm": 0.5399136546666372, "learning_rate": 1.8101447407326182e-05, "loss": 1.0255, "step": 710 },
    { "epoch": 2.88, "grad_norm": 0.6383710075078183, "learning_rate": 1.8044952765821966e-05, "loss": 1.0281, "step": 711 },
    { "epoch": 2.8840506329113924, "grad_norm": 0.4923022559229843, "learning_rate": 1.7988473869999407e-05, "loss": 1.0547, "step": 712 },
    { "epoch": 2.888101265822785, "grad_norm": 0.5470922862641557, "learning_rate": 1.7932011174731764e-05, "loss": 1.0419, "step": 713 },
    { "epoch": 2.892151898734177, "grad_norm": 0.5745866326777408, "learning_rate": 1.7875565134761817e-05, "loss": 1.0017, "step": 714 },
    { "epoch": 2.8962025316455695, "grad_norm": 0.43578157644968657, "learning_rate": 1.7819136204698226e-05, "loss": 1.0331, "step": 715 },
    { "epoch": 2.900253164556962, "grad_norm": 0.5724417179682006, "learning_rate": 1.776272483901182e-05, "loss": 1.0214, "step": 716 },
    { "epoch": 2.9043037974683545, "grad_norm": 0.571811218243599, "learning_rate": 1.7706331492031995e-05, "loss": 1.028, "step": 717 },
    { "epoch": 2.908354430379747, "grad_norm": 0.5456423227749969, "learning_rate": 1.764995661794301e-05, "loss": 1.0152, "step": 718 },
    { "epoch": 2.912405063291139, "grad_norm": 0.48897761642746446, "learning_rate": 1.759360067078035e-05, "loss": 1.0215, "step": 719 },
    { "epoch": 2.9164556962025316, "grad_norm": 0.5419756217224223, "learning_rate": 1.7537264104427064e-05, "loss": 0.9806, "step": 720 },
    { "epoch": 2.920506329113924, "grad_norm": 0.4847201915368628, "learning_rate": 1.748094737261012e-05, "loss": 1.0118, "step": 721 },
    { "epoch": 2.9245569620253162, "grad_norm": 0.4847685304991527, "learning_rate": 1.7424650928896726e-05, "loss": 1.0264, "step": 722 },
    { "epoch": 2.9286075949367087, "grad_norm": 0.4756336198559765, "learning_rate": 1.7368375226690712e-05, "loss": 1.0292, "step": 723 },
    { "epoch": 2.9326582278481013, "grad_norm": 0.4605018370615728, "learning_rate": 1.731212071922883e-05, "loss": 1.0492, "step": 724 },
    { "epoch": 2.9367088607594938, "grad_norm": 0.4995563884295711, "learning_rate": 1.7255887859577156e-05, "loss": 1.0587, "step": 725 },
    { "epoch": 2.9407594936708863, "grad_norm": 0.4629606397352756, "learning_rate": 1.7199677100627427e-05, "loss": 1.0002, "step": 726 },
    { "epoch": 2.9448101265822784, "grad_norm": 0.5121702323100864, "learning_rate": 1.7143488895093343e-05, "loss": 1.0168, "step": 727 },
    { "epoch": 2.948860759493671, "grad_norm": 0.44509760128001585, "learning_rate": 1.7087323695506994e-05, "loss": 1.0075, "step": 728 },
    { "epoch": 2.9529113924050634, "grad_norm": 0.5066683404498322, "learning_rate": 1.7031181954215194e-05, "loss": 1.046, "step": 729 },
    { "epoch": 2.9569620253164555, "grad_norm": 0.4617198437054005, "learning_rate": 1.6975064123375788e-05, "loss": 1.0159, "step": 730 },
    { "epoch": 2.961012658227848, "grad_norm": 0.4803137142559452, "learning_rate": 1.6918970654954084e-05, "loss": 1.0125, "step": 731 },
    { "epoch": 2.9650632911392405, "grad_norm": 0.437251076824209, "learning_rate": 1.686290200071915e-05, "loss": 1.0451, "step": 732 },
    { "epoch": 2.969113924050633, "grad_norm": 0.47531563978040436, "learning_rate": 1.6806858612240234e-05, "loss": 1.0166, "step": 733 },
    { "epoch": 2.9731645569620255, "grad_norm": 0.46586860207167996, "learning_rate": 1.6750840940883078e-05, "loss": 1.0102, "step": 734 },
    { "epoch": 2.9772151898734176, "grad_norm": 0.4527089648702027, "learning_rate": 1.6694849437806305e-05, "loss": 1.0226, "step": 735 },
    { "epoch": 2.98126582278481, "grad_norm": 0.4437355051161407, "learning_rate": 1.663888455395778e-05, "loss": 1.0135, "step": 736 },
    { "epoch": 2.9853164556962026, "grad_norm": 0.42633032133116083, "learning_rate": 1.6582946740070995e-05, "loss": 1.0329, "step": 737 },
    { "epoch": 2.9893670886075947, "grad_norm": 0.4054174841533925, "learning_rate": 1.6527036446661396e-05, "loss": 1.0298, "step": 738 },
    { "epoch": 2.993417721518987, "grad_norm": 0.42404852229400325, "learning_rate": 1.6471154124022818e-05, "loss": 1.0101, "step": 739 },
    { "epoch": 2.9974683544303797, "grad_norm": 0.4846216035603012, "learning_rate": 1.6415300222223788e-05, "loss": 1.0124, "step": 740 },
    { "epoch": 3.0015189873417722, "grad_norm": 0.5100472648721559, "learning_rate": 1.6359475191103958e-05, "loss": 0.9767, "step": 741 },
    { "epoch": 3.0055696202531648, "grad_norm": 0.7940850340756402, "learning_rate": 1.6303679480270466e-05, "loss": 0.8815, "step": 742 },
    { "epoch": 3.009620253164557, "grad_norm": 0.8472581636317164, "learning_rate": 1.624791353909428e-05, "loss": 0.8533, "step": 743 },
    { "epoch": 3.0136708860759494, "grad_norm": 1.1196993014960313, "learning_rate": 1.619217781670663e-05, "loss": 0.8488, "step": 744 },
    { "epoch": 3.017721518987342, "grad_norm": 1.0153465184443184, "learning_rate": 1.6136472761995373e-05, "loss": 0.8605, "step": 745 },
    { "epoch": 3.0217721518987344, "grad_norm": 0.826462503480141, "learning_rate": 1.608079882360134e-05, "loss": 0.8859, "step": 746 },
    { "epoch": 3.0258227848101265, "grad_norm": 0.6484184668157622, "learning_rate": 1.60251564499148e-05, "loss": 0.8576, "step": 747 },
    { "epoch": 3.029873417721519, "grad_norm": 0.725425202944048, "learning_rate": 1.596954608907176e-05, "loss": 0.8382, "step": 748 },
    { "epoch": 3.0339240506329115, "grad_norm": 0.7616537997611557, "learning_rate": 1.591396818895043e-05, "loss": 0.8637, "step": 749 },
    { "epoch": 3.037974683544304, "grad_norm": 0.6462896283778906, "learning_rate": 1.585842319716759e-05, "loss": 0.8703, "step": 750 },
    { "epoch": 3.042025316455696, "grad_norm": 0.7220316010693504, "learning_rate": 1.5802911561074944e-05, "loss": 0.8489, "step": 751 },
    { "epoch": 3.0460759493670886, "grad_norm": 0.7114444699527576, "learning_rate": 1.5747433727755595e-05, "loss": 0.8826, "step": 752 },
    { "epoch": 3.050126582278481, "grad_norm": 0.617933946293048, "learning_rate": 1.5691990144020376e-05, "loss": 0.8443, "step": 753 },
    { "epoch": 3.0541772151898736, "grad_norm": 0.6943428154692264, "learning_rate": 1.5636581256404297e-05, "loss": 0.8747, "step": 754 },
    { "epoch": 3.0582278481012657, "grad_norm": 0.6792956559209735, "learning_rate": 1.558120751116291e-05, "loss": 0.8328, "step": 755 },
    { "epoch": 3.062278481012658, "grad_norm": 0.5850260060911979, "learning_rate": 1.552586935426876e-05, "loss": 0.8476, "step": 756 },
    { "epoch": 3.0663291139240507, "grad_norm": 0.6301254783379673, "learning_rate": 1.547056723140774e-05, "loss": 0.8517, "step": 757 },
    { "epoch": 3.0703797468354432, "grad_norm": 0.6694008952730683, "learning_rate": 1.5415301587975565e-05, "loss": 0.8703, "step": 758 },
    { "epoch": 3.0744303797468353, "grad_norm": 0.5525784085342285, "learning_rate": 1.536007286907411e-05, "loss": 0.8381, "step": 759 },
    { "epoch": 3.078481012658228, "grad_norm": 0.5680610355560322, "learning_rate": 1.5304881519507896e-05, "loss": 0.8354, "step": 760 },
    { "epoch": 3.0825316455696203, "grad_norm": 0.5485095756448677, "learning_rate": 1.5249727983780453e-05, "loss": 0.8611, "step": 761 },
    { "epoch": 3.086582278481013, "grad_norm": 0.5618432087341189, "learning_rate": 1.5194612706090786e-05, "loss": 0.858, "step": 762 },
    { "epoch": 3.090632911392405, "grad_norm": 0.5486784099241084, "learning_rate": 1.5139536130329771e-05, "loss": 0.846, "step": 763 },
    { "epoch": 3.0946835443037974, "grad_norm": 0.6225896834499991, "learning_rate": 1.508449870007656e-05, "loss": 0.8513, "step": 764 },
    { "epoch": 3.09873417721519, "grad_norm": 0.5392828817736721, "learning_rate": 1.5029500858595056e-05, "loss": 0.8523, "step": 765 },
    { "epoch": 3.1027848101265825, "grad_norm": 0.6294064087077287, "learning_rate": 1.4974543048830328e-05, "loss": 0.8513, "step": 766 },
    { "epoch": 3.1068354430379745, "grad_norm": 0.6724661332113148, "learning_rate": 1.4919625713405e-05, "loss": 0.8731, "step": 767 },
    { "epoch": 3.110886075949367, "grad_norm": 0.5114666067189799, "learning_rate": 1.4864749294615756e-05, "loss": 0.8471, "step": 768 },
    { "epoch": 3.1149367088607596, "grad_norm": 0.6640261957507612, "learning_rate": 1.4809914234429716e-05, "loss": 0.8505, "step": 769 },
    { "epoch": 3.118987341772152, "grad_norm": 0.6375009643138477, "learning_rate": 1.4755120974480923e-05, "loss": 0.8713, "step": 770 },
    { "epoch": 3.123037974683544, "grad_norm": 0.5665719793299382, "learning_rate": 1.4700369956066771e-05, "loss": 0.8645, "step": 771 },
    { "epoch": 3.1270886075949367, "grad_norm": 0.6508042835076818, "learning_rate": 1.4645661620144413e-05, "loss": 0.8615, "step": 772 },
    { "epoch": 3.131139240506329, "grad_norm": 0.523022008387731, "learning_rate": 1.4590996407327284e-05, "loss": 0.8584, "step": 773 },
    { "epoch": 3.1351898734177217, "grad_norm": 0.6046989386315796, "learning_rate": 1.4536374757881487e-05, "loss": 0.8676, "step": 774 },
    { "epoch": 3.1392405063291138, "grad_norm": 0.686740217615702, "learning_rate": 1.4481797111722271e-05, "loss": 0.839, "step": 775 },
    { "epoch": 3.1432911392405063, "grad_norm": 0.5166187782569124, "learning_rate": 1.4427263908410507e-05, "loss": 0.8558, "step": 776 },
    { "epoch": 3.147341772151899, "grad_norm": 0.5577086776852163, "learning_rate": 1.4372775587149108e-05, "loss": 0.8628, "step": 777 },
    { "epoch": 3.1513924050632913, "grad_norm": 0.5877570910991192, "learning_rate": 1.4318332586779522e-05, "loss": 0.8683, "step": 778 },
    { "epoch": 3.1554430379746834, "grad_norm": 0.5285863222116387, "learning_rate": 1.4263935345778202e-05, "loss": 0.841, "step": 779 },
    { "epoch": 3.159493670886076, "grad_norm": 0.49682731118680074, "learning_rate": 1.420958430225303e-05, "loss": 0.8238, "step": 780 },
    { "epoch": 3.1635443037974684, "grad_norm": 0.547394026090636, "learning_rate": 1.415527989393985e-05, "loss": 0.8604, "step": 781 },
    { "epoch": 3.167594936708861, "grad_norm": 0.47926112406609717, "learning_rate": 1.410102255819891e-05, "loss": 0.8733, "step": 782 },
    { "epoch": 3.171645569620253, "grad_norm": 0.5199765954548031, "learning_rate": 1.404681273201131e-05, "loss": 0.8508, "step": 783 },
    { "epoch": 3.1756962025316455, "grad_norm": 0.5373819617556451, "learning_rate": 1.399265085197556e-05, "loss": 0.8486, "step": 784 },
    { "epoch": 3.179746835443038, "grad_norm": 0.47779899464747505, "learning_rate": 1.393853735430398e-05, "loss": 0.87, "step": 785 },
    { "epoch": 3.1837974683544306, "grad_norm": 0.5107792600372495, "learning_rate": 1.3884472674819246e-05, "loss": 0.8462, "step": 786 },
    { "epoch": 3.1878481012658226, "grad_norm": 0.6018675418247474, "learning_rate": 1.3830457248950864e-05, "loss": 0.8396, "step": 787 },
    { "epoch": 3.191898734177215, "grad_norm": 0.48484303516460425, "learning_rate": 1.377649151173163e-05, "loss": 0.8454, "step": 788 },
    { "epoch": 3.1959493670886077, "grad_norm": 0.5322482082345672, "learning_rate": 1.3722575897794181e-05, "loss": 0.8616, "step": 789 },
    { "epoch": 3.2, "grad_norm": 0.5537949900707131, "learning_rate": 1.3668710841367472e-05, "loss": 0.8483, "step": 790 },
    { "epoch": 3.2040506329113922, "grad_norm": 0.51453721945, "learning_rate": 1.361489677627324e-05, "loss": 0.8685, "step": 791 },
    { "epoch": 3.2081012658227848, "grad_norm": 0.5012382718314914, "learning_rate": 1.3561134135922585e-05, "loss": 0.8544, "step": 792 },
    { "epoch": 3.2121518987341773, "grad_norm": 0.5058488987870322, "learning_rate": 1.350742335331241e-05, "loss": 0.8697, "step": 793 },
    { "epoch": 3.21620253164557, "grad_norm": 0.49506017951103093, "learning_rate": 1.345376486102198e-05, "loss": 0.8515, "step": 794 },
    { "epoch": 3.220253164556962, "grad_norm": 0.4921062485871461, "learning_rate": 1.3400159091209414e-05, "loss": 0.8704, "step": 795 },
    { "epoch": 3.2243037974683544, "grad_norm": 0.4715942980907537, "learning_rate": 1.3346606475608216e-05, "loss": 0.8624, "step": 796 },
    { "epoch": 3.228354430379747, "grad_norm": 0.5167271281630322, "learning_rate": 1.3293107445523781e-05, "loss": 0.8746, "step": 797 },
    { "epoch": 3.2324050632911394, "grad_norm": 0.4715008273775736, "learning_rate": 1.3239662431829949e-05, "loss": 0.8768, "step": 798 },
    { "epoch": 3.2364556962025315, "grad_norm": 0.4718589866200359, "learning_rate": 1.3186271864965509e-05, "loss": 0.8407, "step": 799 },
    { "epoch": 3.240506329113924, "grad_norm": 0.4948416711430985, "learning_rate": 1.3132936174930756e-05, "loss": 0.8523, "step": 800 },
    { "epoch": 3.2445569620253165, "grad_norm": 0.4988185986406195, "learning_rate": 1.3079655791283995e-05, "loss": 0.848, "step": 801 },
    { "epoch": 3.248607594936709, "grad_norm": 0.5168591169036743, "learning_rate": 1.3026431143138108e-05, "loss": 0.8852, "step": 802 },
    { "epoch": 3.252658227848101, "grad_norm": 0.5108886392907334, "learning_rate": 1.2973262659157114e-05, "loss": 0.8551, "step": 803 },
    { "epoch": 3.2567088607594936, "grad_norm": 0.4826735949338651, "learning_rate": 1.2920150767552651e-05, "loss": 0.8744, "step": 804 },
    { "epoch": 3.260759493670886, "grad_norm": 0.5475371964312847, "learning_rate": 1.2867095896080607e-05, "loss": 0.8659, "step": 805 },
    { "epoch": 3.2648101265822786, "grad_norm": 0.5402728331837098, "learning_rate": 1.2814098472037612e-05, "loss": 0.8626, "step": 806 },
    { "epoch": 3.2688607594936707, "grad_norm": 0.5374773441841417, "learning_rate": 1.276115892225764e-05, "loss": 0.8552, "step": 807 },
    { "epoch": 3.2729113924050632, "grad_norm": 0.49334675206037387, "learning_rate": 1.2708277673108555e-05, "loss": 0.8621, "step": 808 },
    { "epoch": 3.2769620253164558, "grad_norm": 0.5811335818898145, "learning_rate": 1.2655455150488649e-05, "loss": 0.8678, "step": 809 },
    { "epoch": 3.2810126582278483, "grad_norm": 0.5221978329717215, "learning_rate": 1.2602691779823272e-05, "loss": 0.8713, "step": 810 },
    { "epoch": 3.2850632911392403, "grad_norm": 0.5545509486631972, "learning_rate": 1.2549987986061355e-05, "loss": 0.8556, "step": 811 },
    { "epoch": 3.289113924050633, "grad_norm": 0.4851448552925207, "learning_rate": 1.2497344193672005e-05, "loss": 0.8429, "step": 812 },
    { "epoch": 3.2931645569620254, "grad_norm": 0.5203222948539638, "learning_rate": 1.2444760826641092e-05, "loss": 0.877, "step": 813 },
    { "epoch": 3.297215189873418, "grad_norm": 0.5011749672391853, "learning_rate": 1.2392238308467817e-05, "loss": 0.8444, "step": 814 },
    { "epoch": 3.30126582278481, "grad_norm": 0.49002170003142786, "learning_rate": 1.2339777062161326e-05, "loss": 0.8722, "step": 815 },
    { "epoch": 3.3053164556962025, "grad_norm": 0.45090799234723117, "learning_rate": 1.2287377510237293e-05, "loss": 0.8629, "step": 816 },
    { "epoch": 3.309367088607595, "grad_norm": 0.5154154906832641, "learning_rate": 1.2235040074714488e-05, "loss": 0.8466, "step": 817 },
    { "epoch": 3.3134177215189875, "grad_norm": 0.5042249940801934, "learning_rate": 1.2182765177111434e-05, "loss": 0.8543, "step": 818 },
    { "epoch": 3.3174683544303796, "grad_norm": 0.47777314558949063, "learning_rate": 1.213055323844297e-05, "loss": 0.8413, "step": 819 },
    { "epoch": 3.321518987341772, "grad_norm": 0.5129928648171661, "learning_rate": 1.2078404679216864e-05, "loss": 0.8662, "step": 820 },
    { "epoch": 3.3255696202531646, "grad_norm": 0.4747399922839749, "learning_rate": 1.2026319919430458e-05, "loss": 0.8659, "step": 821 },
    { "epoch": 3.329620253164557, "grad_norm": 0.5178435161207365, "learning_rate": 1.1974299378567227e-05, "loss": 0.8407, "step": 822 },
    { "epoch": 3.333670886075949, "grad_norm": 0.441372049072934, "learning_rate": 1.1922343475593462e-05, "loss": 0.8597, "step": 823 },
    { "epoch": 3.3377215189873417, "grad_norm": 0.48696560180458887, "learning_rate": 1.187045262895488e-05, "loss": 0.8896, "step": 824 },
    { "epoch": 3.3417721518987342, "grad_norm": 0.4793400452479297, "learning_rate": 1.1818627256573203e-05, "loss": 0.8461, "step": 825 },
    { "epoch": 3.3458227848101267, "grad_norm": 0.4498068326285848, "learning_rate": 1.1766867775842864e-05, "loss": 0.8594, "step": 826 },
    { "epoch": 3.349873417721519, "grad_norm": 0.469888209566026, "learning_rate": 1.1715174603627615e-05, "loss": 0.8536, "step": 827 },
    { "epoch": 3.3539240506329113, "grad_norm": 0.47705766739937333, "learning_rate": 1.1663548156257147e-05, "loss": 0.8691, "step": 828 },
    { "epoch": 3.357974683544304, "grad_norm": 0.45605148536097295, "learning_rate": 1.161198884952377e-05, "loss": 0.89, "step": 829 },
    { "epoch": 3.3620253164556964, "grad_norm": 0.5470579389762384, "learning_rate": 1.1560497098679056e-05, "loss": 0.8559, "step": 830 },
    { "epoch": 3.3660759493670884, "grad_norm": 0.49464733549203643, "learning_rate": 1.1509073318430479e-05, "loss": 0.8604, "step": 831 },
    { "epoch": 3.370126582278481, "grad_norm": 0.4682054358413757, "learning_rate": 1.1457717922938116e-05, "loss": 0.8544, "step": 832 },
    { "epoch": 3.3741772151898735, "grad_norm": 0.49773485385052296, "learning_rate": 1.1406431325811233e-05, "loss": 0.8437, "step": 833 },
    { "epoch": 3.378227848101266, "grad_norm": 0.4638899094688478, "learning_rate": 1.135521394010506e-05, "loss": 0.8655, "step": 834 },
    { "epoch": 3.382278481012658, "grad_norm": 0.4892214322439687, "learning_rate": 1.1304066178317367e-05, "loss": 0.8628, "step": 835 },
    { "epoch": 3.3863291139240506, "grad_norm": 0.4720366516869082, "learning_rate": 1.1252988452385199e-05, "loss": 0.8466, "step": 836 },
    { "epoch": 3.390379746835443, "grad_norm": 0.5050874497283874, "learning_rate": 1.1201981173681536e-05, "loss": 0.8586, "step": 837 },
    { "epoch": 3.3944303797468356, "grad_norm": 0.46039202617554503, "learning_rate": 1.1151044753011991e-05, "loss": 0.8502, "step": 838 },
    { "epoch": 3.3984810126582277, "grad_norm": 0.4741288489695432, "learning_rate": 1.1100179600611491e-05, "loss": 0.8485, "step": 839 },
    { "epoch": 3.40253164556962, "grad_norm": 0.4544383608758889, "learning_rate": 1.1049386126140985e-05, "loss": 0.863, "step": 840 },
    { "epoch": 3.4065822784810127, "grad_norm": 0.44421783829640366, "learning_rate": 1.0998664738684128e-05, "loss": 0.8601, "step": 841 },
    { "epoch": 3.410632911392405, "grad_norm": 0.474671674144896, "learning_rate": 1.0948015846744e-05, "loss": 0.869, "step": 842 },
    { "epoch": 3.4146835443037973, "grad_norm": 0.4726014331049998, "learning_rate": 1.0897439858239832e-05, "loss": 0.8612, "step": 843 },
    { "epoch": 3.41873417721519, "grad_norm": 0.42591144693962607, "learning_rate": 1.0846937180503652e-05, "loss": 0.8615, "step": 844 },
    { "epoch": 3.4227848101265823, "grad_norm": 0.49038026410144775, "learning_rate": 1.0796508220277117e-05, "loss": 0.8623, "step": 845 },
    { "epoch": 3.426835443037975, "grad_norm": 0.4349745417581744, "learning_rate": 1.0746153383708107e-05, "loss": 0.8327, "step": 846 },
    { "epoch": 3.430886075949367, "grad_norm": 0.4605217677295788, "learning_rate": 1.0695873076347579e-05, "loss": 0.8629, "step": 847 },
    { "epoch": 3.4349367088607594, "grad_norm": 0.4688154246012126, "learning_rate": 1.0645667703146205e-05, "loss": 0.8612, "step": 848 },
    { "epoch": 3.438987341772152, "grad_norm": 0.442719788770277, "learning_rate": 1.0595537668451161e-05, "loss": 0.8525, "step": 849 },
    { "epoch": 3.4430379746835444, "grad_norm": 0.4353956061570409, "learning_rate": 1.0545483376002854e-05, "loss": 0.8394, "step": 850 },
    { "epoch": 3.4470886075949365, "grad_norm": 0.4666698629651009, "learning_rate": 1.0495505228931676e-05, "loss": 0.8615, "step": 851 },
    { "epoch": 3.451139240506329, "grad_norm": 0.4896865629985745, "learning_rate": 1.044560362975474e-05, "loss": 0.865, "step": 852 },
    { "epoch": 3.4551898734177215, "grad_norm": 0.45322888201532946, "learning_rate": 1.0395778980372695e-05, "loss": 0.864, "step": 853 },
    { "epoch": 3.459240506329114, "grad_norm": 0.45956447961461894, "learning_rate": 1.0346031682066381e-05, "loss": 0.8508, "step": 854 },
    { "epoch": 3.463291139240506, "grad_norm": 0.47279988072578844, "learning_rate": 1.0296362135493724e-05, "loss": 0.8712, "step": 855 },
    { "epoch": 3.4673417721518986, "grad_norm": 0.5077997030799645, "learning_rate": 1.0246770740686422e-05, "loss": 0.8674, "step": 856 },
    { "epoch": 3.471392405063291, "grad_norm": 0.46873177403947935, "learning_rate": 1.0197257897046743e-05, "loss": 0.8663, "step": 857 },
    { "epoch": 3.4754430379746837, "grad_norm": 0.5353437167850071, "learning_rate": 1.014782400334433e-05, "loss": 0.8519, "step": 858 },
    { "epoch": 3.479493670886076, "grad_norm": 0.4543633512495401, "learning_rate": 1.009846945771296e-05, "loss": 0.8658, "step": 859 },
    { "epoch": 3.4835443037974683, "grad_norm": 0.49264460762343015, "learning_rate": 1.0049194657647363e-05, "loss": 0.8513, "step": 860 },
    { "epoch": 3.487594936708861, "grad_norm": 0.4951063230915622, "learning_rate": 1.0000000000000006e-05, "loss": 0.8667, "step": 861 },
    { "epoch": 3.4916455696202533, "grad_norm": 0.44099396444065886, "learning_rate": 9.950885880977891e-06, "loss": 0.8303, "step": 862 },
    { "epoch": 3.4956962025316454, "grad_norm": 0.4650406046530908, "learning_rate": 9.901852696139382e-06, "loss": 0.8558, "step": 863 },
    { "epoch": 3.499746835443038, "grad_norm": 0.49381916430436745, "learning_rate": 9.852900840391027e-06, "loss": 0.8758, "step": 864 },
    { "epoch": 3.5037974683544304, "grad_norm": 0.45152550937213043, "learning_rate": 9.804030707984313e-06, "loss": 0.8633, "step": 865 },
    { "epoch": 3.507848101265823, "grad_norm": 0.4704217587868533, "learning_rate": 9.755242692512599e-06, "loss": 0.8623, "step": 866 },
    { "epoch": 3.5118987341772154, "grad_norm": 0.48724891824680505, "learning_rate": 9.70653718690782e-06, "loss": 0.8612, "step": 867 },
    { "epoch": 3.5159493670886075, "grad_norm": 0.5182634549927265, "learning_rate": 9.657914583437454e-06, "loss": 0.8616, "step": 868 },
    { "epoch": 3.52, "grad_norm": 0.4841175626016344, "learning_rate": 9.609375273701246e-06, "loss": 0.8682, "step": 869 },
    { "epoch": 3.5240506329113925, "grad_norm": 0.44398176561843006, "learning_rate": 9.560919648628133e-06, "loss": 0.835, "step": 870 },
    { "epoch": 3.5281012658227846, "grad_norm": 0.46032212217150315, "learning_rate": 9.512548098473047e-06, "loss": 0.8485, "step": 871 },
    { "epoch": 3.532151898734177, "grad_norm": 0.4853228588172091, "learning_rate": 9.464261012813825e-06, "loss": 0.8801, "step": 872 },
    { "epoch": 3.5362025316455696, "grad_norm": 0.4667046031555135, "learning_rate": 9.416058780547987e-06, "loss": 0.8458, "step": 873 },
    { "epoch": 3.540253164556962, "grad_norm": 0.43356477955034306, "learning_rate": 9.367941789889714e-06, "loss": 0.8456, "step": 874 },
    { "epoch": 3.5443037974683547, "grad_norm": 0.45852910904743077, "learning_rate": 9.319910428366607e-06, "loss": 0.8685, "step": 875 },
    { "epoch": 3.5483544303797467, "grad_norm": 0.47269673230146425, "learning_rate": 9.271965082816667e-06, "loss": 0.8544, "step": 876 },
    { "epoch": 3.5524050632911393, "grad_norm": 0.48028146837920654, "learning_rate": 9.224106139385111e-06, "loss": 0.8636, "step": 877 },
    { "epoch": 3.5564556962025318, "grad_norm": 0.43948553608546737, "learning_rate": 9.176333983521291e-06, "loss": 0.881, "step": 878 },
    { "epoch": 3.560506329113924, "grad_norm": 0.441711083164455, "learning_rate": 9.12864899997558e-06, "loss": 0.8553, "step": 879 },
    { "epoch": 3.5645569620253164, "grad_norm": 0.49809664632255973, "learning_rate": 9.08105157279628e-06, "loss": 0.8705, "step": 880 },
    { "epoch": 3.568607594936709, "grad_norm": 0.45473410569513895, "learning_rate": 9.03354208532653e-06, "loss": 0.8564, "step": 881 },
    { "epoch": 3.5726582278481014, "grad_norm": 0.4503261802650196, "learning_rate": 8.986120920201205e-06, "loss": 0.8491, "step": 882 },
    { "epoch": 3.576708860759494, "grad_norm": 0.4855618703994967, "learning_rate": 8.938788459343852e-06, "loss": 0.8547, "step": 883 },
    { "epoch": 3.580759493670886, "grad_norm": 0.4293433445469158, "learning_rate": 8.8915450839636e-06, "loss": 0.8683, "step": 884 },
    { "epoch": 3.5848101265822785, "grad_norm": 0.45460836785133596, "learning_rate": 8.844391174552116e-06, "loss": 0.8766, "step": 885 },
    { "epoch": 3.588860759493671, "grad_norm": 0.4472154302647609, "learning_rate": 8.797327110880479e-06, "loss": 0.8533, "step": 886 },
    { "epoch": 3.592911392405063, "grad_norm": 0.42658969102079963, "learning_rate": 8.750353271996206e-06, "loss": 0.8683, "step": 887 },
    { "epoch": 3.5969620253164556, "grad_norm": 0.43476724912119574, "learning_rate": 8.703470036220132e-06, "loss": 0.8606, "step": 888 },
    { "epoch": 3.601012658227848, "grad_norm": 0.44895103957587856, "learning_rate": 8.656677781143394e-06, "loss": 0.8442, "step": 889 },
    { "epoch": 3.6050632911392406, "grad_norm": 0.45367721227215985, "learning_rate": 8.609976883624377e-06, "loss": 0.8492, "step": 890 },
    { "epoch": 3.609113924050633, "grad_norm": 0.4245925418018429, "learning_rate": 8.563367719785698e-06, "loss": 0.8423, "step": 891 },
    { "epoch": 3.613164556962025, "grad_norm": 0.4487673139347265, "learning_rate": 8.516850665011138e-06, "loss": 0.862, "step": 892 },
    { "epoch": 3.6172151898734177, "grad_norm": 0.45047147576212493, "learning_rate": 8.47042609394269e-06, "loss": 0.8492, "step": 893 },
    { "epoch": 3.6212658227848102, "grad_norm": 0.4629569305659359, "learning_rate": 8.424094380477432e-06, "loss": 0.8708, "step": 894 },
    { "epoch": 3.6253164556962023, "grad_norm": 0.4347177828032504, "learning_rate": 8.37785589776465e-06, "loss": 0.8612, "step": 895 },
    { "epoch": 3.629367088607595, "grad_norm": 0.44401055368542536, "learning_rate": 8.331711018202694e-06, "loss": 0.8444, "step": 896 },
    { "epoch": 3.6334177215189873, "grad_norm": 0.4341929762403499, "learning_rate": 8.285660113436104e-06, "loss": 0.8687, "step": 897 },
    { "epoch": 3.63746835443038, "grad_norm": 0.4604145885247143, "learning_rate": 8.239703554352527e-06, "loss": 0.8822, "step": 898 },
    { "epoch": 3.6415189873417724, "grad_norm": 0.4510126094907305, "learning_rate": 8.193841711079775e-06, "loss": 0.8725, "step": 899 },
    { "epoch": 3.6455696202531644, "grad_norm": 0.43841890839465214, "learning_rate": 8.148074952982828e-06, "loss": 0.8668, "step": 900 },
    { "epoch": 3.649620253164557, "grad_norm": 0.43377494756106416, "learning_rate": 8.102403648660859e-06, "loss": 0.8536, "step": 901 },
    { "epoch": 3.6536708860759495, "grad_norm": 0.4544241485280512, "learning_rate": 8.056828165944282e-06, "loss": 0.8682, "step": 902 },
    { "epoch": 3.6577215189873415, "grad_norm": 0.4300511245757169, "learning_rate": 8.011348871891762e-06, "loss": 0.8672, "step": 903 },
    { "epoch": 3.661772151898734, "grad_norm": 0.45722018896945604, "learning_rate": 7.965966132787287e-06, "loss": 0.8613, "step": 904 },
    { "epoch": 3.6658227848101266, "grad_norm": 0.4626337467017989, "learning_rate": 7.920680314137189e-06, "loss": 0.8757, "step": 905 },
    { "epoch": 3.669873417721519, "grad_norm": 0.4245502855792999, "learning_rate": 7.875491780667246e-06, "loss": 0.8506, "step": 906 },
    { "epoch": 3.6739240506329116, "grad_norm": 0.44267397354146654, "learning_rate": 7.830400896319667e-06, "loss": 0.8754, "step": 907 },
    { "epoch": 3.6779746835443037, "grad_norm": 0.43255853672787625, "learning_rate": 7.785408024250259e-06, "loss": 0.8582, "step": 908 },
    { "epoch": 3.682025316455696, "grad_norm": 0.4253317983644499, "learning_rate": 7.74051352682542e-06, "loss": 0.8765, "step": 909 },
    { "epoch": 3.6860759493670887, "grad_norm": 0.4302880799649695, "learning_rate": 7.695717765619257e-06, "loss": 0.8344, "step": 910 },
    { "epoch": 3.690126582278481, "grad_norm": 0.4339420553039311, "learning_rate": 7.651021101410673e-06, "loss": 0.8611, "step": 911 },
    { "epoch": 3.6941772151898733, "grad_norm": 0.43131845868563506, "learning_rate": 7.606423894180464e-06, "loss": 0.8578, "step": 912 },
    { "epoch": 3.698227848101266, "grad_norm": 0.41622739962031957, "learning_rate": 7.56192650310839e-06, "loss": 0.8538, "step": 913 },
    { "epoch": 3.7022784810126583, "grad_norm": 0.4356736702915069, "learning_rate": 7.517529286570349e-06, "loss": 0.8751, "step": 914 },
    { "epoch": 3.706329113924051, "grad_norm": 0.44266438492323185, "learning_rate": 7.473232602135387e-06, "loss": 0.8465, "step": 915 },
    { "epoch": 3.710379746835443, "grad_norm": 0.4199543977807518, "learning_rate": 7.429036806562935e-06, "loss": 0.8475, "step": 916 },
    { "epoch": 3.7144303797468354, "grad_norm": 0.42898294742075693, "learning_rate": 7.3849422557998455e-06, "loss": 0.8579, "step": 917 },
    { "epoch": 3.718481012658228, "grad_norm": 0.45372597212590554, "learning_rate": 7.340949304977567e-06, "loss": 0.877, "step": 918 },
    { "epoch": 3.72253164556962, "grad_norm": 0.4782097675487259, "learning_rate": 7.297058308409282e-06, "loss": 0.8605, "step": 919 },
    { "epoch": 3.7265822784810125, "grad_norm": 0.42667519173035895, "learning_rate": 7.25326961958704e-06, "loss": 0.845, "step": 920 },
    { "epoch": 3.730632911392405, "grad_norm": 0.4412596572308266, "learning_rate": 7.209583591178921e-06, "loss": 0.8782, "step": 921 },
    { "epoch": 3.7346835443037976, "grad_norm": 0.43721685167453056, "learning_rate": 7.1660005750261925e-06, "loss": 0.8581, "step": 922 },
    { "epoch": 3.73873417721519, "grad_norm": 0.4358381565410245, "learning_rate": 7.1225209221404765e-06, "loss": 0.8539, "step": 923 },
    { "epoch": 3.742784810126582, "grad_norm": 0.46709876598481115, "learning_rate": 7.079144982700909e-06, "loss": 0.8577, "step": 924 },
    { "epoch": 3.7468354430379747, "grad_norm": 0.4318497674876077, "learning_rate": 7.0358731060513695e-06, "loss": 0.8579, "step": 925 },
    { "epoch": 3.750886075949367, "grad_norm": 0.45093121162418454, "learning_rate": 6.99270564069757e-06, "loss": 0.8559, "step": 926 },
    { "epoch": 3.7549367088607593, "grad_norm": 0.43365208597287247, "learning_rate": 6.949642934304375e-06, "loss": 0.8722, "step": 927 },
    { "epoch": 3.7589873417721518, "grad_norm": 0.4222340533854372, "learning_rate": 6.906685333692871e-06, "loss": 0.8546, "step": 928 },
    { "epoch": 3.7630379746835443, "grad_norm": 0.4306142019218229, "learning_rate": 6.86383318483769e-06, "loss": 0.8507, "step": 929 },
    { "epoch": 3.767088607594937, "grad_norm": 0.434284943368926, "learning_rate": 6.821086832864139e-06, "loss": 0.8411, "step": 930 },
    { "epoch": 3.7711392405063293, "grad_norm": 0.42871593278171216, "learning_rate": 6.77844662204546e-06, "loss": 0.8332, "step": 931 },
    { "epoch": 3.7751898734177214, "grad_norm": 0.44833722344271615, "learning_rate": 6.7359128958000455e-06, "loss": 0.8531, "step": 932 },
    { "epoch": 3.779240506329114, "grad_norm": 0.4306046713177489, "learning_rate": 6.693485996688695e-06, "loss": 0.8814, "step": 933 },
    { "epoch": 3.7832911392405064, "grad_norm": 0.44848781499988855, "learning_rate": 6.651166266411801e-06, "loss": 0.8504, "step": 934 },
    { "epoch": 3.7873417721518985, "grad_norm": 0.4243191033573401, "learning_rate": 6.6089540458066725e-06, "loss": 0.8696, "step": 935 },
    { "epoch": 3.791392405063291, "grad_norm": 0.4199135048918373, "learning_rate": 6.566849674844711e-06, "loss": 0.8612, "step": 936 },
    { "epoch": 3.7954430379746835, "grad_norm": 0.44378401886314917, "learning_rate": 6.524853492628747e-06, "loss": 0.8341, "step": 937 },
    { "epoch": 3.799493670886076, "grad_norm": 0.4321715154290951, "learning_rate": 6.4829658373902536e-06, "loss": 0.8552, "step": 938 },
    { "epoch": 3.8035443037974686, "grad_norm": 0.42871133064396516, "learning_rate": 6.441187046486648e-06, "loss": 0.8454, "step": 939 },
    { "epoch": 3.8075949367088606, "grad_norm": 0.4689330655595317, "learning_rate": 6.399517456398567e-06, "loss": 0.859, "step": 940 },
    { "epoch": 3.811645569620253, "grad_norm": 0.4355530919401553, "learning_rate": 6.357957402727164e-06, "loss": 0.8592, "step": 941 },
    { "epoch": 3.8156962025316457, "grad_norm": 0.4504301897111289, "learning_rate": 6.316507220191395e-06, "loss": 0.8643, "step": 942 },
    { "epoch": 3.8197468354430377, "grad_norm": 0.42189873529982413, "learning_rate": 6.275167242625331e-06, "loss": 0.8608, "step": 943 },
    { "epoch": 3.8237974683544302, "grad_norm": 0.4190743313607874, "learning_rate": 6.233937802975471e-06, "loss": 0.8394, "step": 944 },
    { "epoch": 3.8278481012658228, "grad_norm": 0.4279058445407614, "learning_rate": 6.192819233298046e-06, "loss": 0.8795, "step": 945 },
    { "epoch": 3.8318987341772153, "grad_norm": 0.4302420482429957, "learning_rate": 6.151811864756383e-06, "loss": 0.8399, "step": 946 },
    { "epoch": 3.835949367088608, "grad_norm": 0.4270955513795819, "learning_rate": 6.1109160276181655e-06, "loss": 0.8529, "step": 947 },
    { "epoch": 3.84, "grad_norm": 0.4121961072304005, "learning_rate": 6.070132051252868e-06, "loss": 0.8742, "step": 948 },
    { "epoch": 3.8440506329113924, "grad_norm": 0.43858761693119175, "learning_rate": 6.0294602641290034e-06, "loss": 0.8696, "step": 949 },
    { "epoch": 3.848101265822785, "grad_norm": 0.4405054286279495, "learning_rate": 5.988900993811575e-06, "loss": 0.8412, "step": 950 },
    { "epoch": 3.852151898734177, "grad_norm": 0.40862724538485984, "learning_rate": 5.948454566959363e-06, "loss": 0.8525, "step": 951 },
    { "epoch": 3.8562025316455695, "grad_norm": 0.4168952589362802, "learning_rate": 5.908121309322328e-06, "loss": 0.8478, "step": 952 },
    { "epoch": 3.860253164556962, "grad_norm": 0.44683118210279316, "learning_rate": 5.867901545738976e-06, "loss": 0.8535, "step": 953 },
    { "epoch": 3.8643037974683545, "grad_norm": 0.45633524728567043, "learning_rate": 5.827795600133774e-06, "loss": 0.8519, "step": 954 },
    { "epoch": 3.868354430379747, "grad_norm": 0.4137304246484163, "learning_rate": 5.787803795514466e-06, "loss": 0.8294, "step": 955 },
    { "epoch": 3.872405063291139, "grad_norm": 0.40517432394566916, "learning_rate": 5.747926453969576e-06, "loss": 0.8496, "step": 956 },
    { "epoch": 3.8764556962025316, "grad_norm": 0.4551332544227311, "learning_rate": 5.708163896665708e-06, "loss": 0.8448, "step": 957 },
    { "epoch": 3.880506329113924, "grad_norm": 0.4426683058120676, "learning_rate": 5.668516443845047e-06, "loss": 0.848, "step": 958 },
    { "epoch": 3.884556962025316, "grad_norm": 0.42814054952957725, "learning_rate": 5.6289844148227225e-06, "loss": 0.853, "step": 959 },
    { "epoch": 3.8886075949367087, "grad_norm": 0.41995701596474777, "learning_rate": 5.5895681279842615e-06, "loss": 0.8654, "step": 960 },
    { "epoch": 3.8926582278481012, "grad_norm": 0.4328372730710024, "learning_rate": 5.550267900783019e-06, "loss": 0.8401, "step": 961 },
    { "epoch": 3.8967088607594937, "grad_norm": 0.44836473702417, "learning_rate": 5.511084049737623e-06, "loss": 0.8789, "step": 962 },
    { "epoch": 3.9007594936708863, "grad_norm": 0.4299508183685661, "learning_rate": 5.4720168904294215e-06, "loss": 0.8619, "step": 963 },
    { "epoch": 3.9048101265822783, "grad_norm": 0.45508823108559676, "learning_rate": 5.433066737499948e-06, "loss": 0.8745, "step": 964 },
    { "epoch": 3.908860759493671, "grad_norm": 0.43349126432492535, "learning_rate": 5.394233904648376e-06, "loss": 0.8634, "step": 965 },
    { "epoch": 3.9129113924050634, "grad_norm": 0.43620502823452584, "learning_rate": 5.355518704628997e-06, "loss": 0.8627, "step": 966 },
    { "epoch": 3.9169620253164554, "grad_norm": 0.4628623135193572, "learning_rate": 5.316921449248731e-06, "loss": 0.8466, "step": 967 },
    { "epoch": 3.921012658227848, "grad_norm": 0.4272519685837642, "learning_rate": 5.278442449364538e-06, "loss": 0.8697, "step": 968 },
    { "epoch": 3.9250632911392405, "grad_norm": 0.4116999538454061, "learning_rate": 5.240082014881016e-06, "loss": 0.8436, "step": 969 },
    { "epoch": 3.929113924050633, "grad_norm": 0.43896133333935833, "learning_rate": 5.201840454747822e-06, "loss": 0.8419, "step": 970 },
    { "epoch": 3.9331645569620255, "grad_norm": 0.4476967228913993, "learning_rate": 5.163718076957223e-06, "loss": 0.8416, "step": 971 },
    { "epoch": 3.9372151898734176, "grad_norm": 0.4436884835655507, "learning_rate": 5.125715188541609e-06, "loss": 0.8708, "step": 972 },
    { "epoch": 3.94126582278481, "grad_norm": 0.41944909267345304, "learning_rate": 5.087832095571021e-06, "loss": 0.8455, "step": 973 },
    { "epoch": 3.9453164556962026, "grad_norm": 0.40816168274947523, "learning_rate": 5.0500691031506766e-06, "loss": 0.8562, "step": 974 },
    { "epoch": 3.9493670886075947, "grad_norm": 0.41313944786583956, "learning_rate": 5.01242651541854e-06, "loss": 0.8581, "step": 975 },
    { "epoch": 3.953417721518987, "grad_norm": 0.42754059642545406, "learning_rate": 4.974904635542815e-06, "loss": 0.8379, "step": 976 },
    { "epoch": 3.9574683544303797, "grad_norm": 0.40913005879554865, "learning_rate": 4.937503765719582e-06, "loss": 0.8534, "step": 977 },
    { "epoch": 3.961518987341772, "grad_norm": 0.42831682529661047, "learning_rate": 4.900224207170299e-06, "loss": 0.8485, "step": 978 },
    { "epoch": 3.9655696202531647, "grad_norm": 0.44262886273491836, "learning_rate": 4.8630662601394065e-06, "loss": 0.8627, "step": 979 },
    { "epoch": 3.969620253164557, "grad_norm": 0.4181257483904811, "learning_rate": 4.8260302238918995e-06, "loss": 0.8524, "step": 980 },
    { "epoch": 3.9736708860759493, "grad_norm": 0.4143983358723759, "learning_rate": 4.789116396710924e-06, "loss": 0.8497, "step": 981 },
    { "epoch": 3.977721518987342, "grad_norm": 0.41457448251945356, "learning_rate": 4.752325075895368e-06, "loss": 0.8479, "step": 982 },
    { "epoch": 3.981772151898734, "grad_norm": 0.435093984728811, "learning_rate": 4.715656557757473e-06, "loss": 0.8334, "step": 983 },
    { "epoch": 3.9858227848101264, "grad_norm": 0.4377403617922161, "learning_rate": 4.679111137620442e-06, "loss": 0.8482, "step": 984 },
    { "epoch": 3.989873417721519, "grad_norm": 0.4267208530940564, "learning_rate": 4.6426891098160585e-06, "loss": 0.8599, "step": 985 },
    { "epoch": 3.9939240506329114, "grad_norm": 0.41038153079985584, "learning_rate": 4.6063907676823474e-06, "loss": 0.8468, "step": 986 },
    { "epoch": 3.997974683544304, "grad_norm": 0.4112766723167426, "learning_rate": 4.570216403561141e-06, "loss": 0.8539, "step": 987 },
    { "epoch": 4.002025316455696, "grad_norm": 0.6611584306513932, "learning_rate": 4.534166308795815e-06, "loss": 0.7993, "step": 988 },
    { "epoch": 4.006075949367089, "grad_norm": 0.9583794298794673, "learning_rate": 4.498240773728859e-06, "loss": 0.7376, "step": 989 },
    { "epoch": 4.010126582278481, "grad_norm": 0.7321579122000444, "learning_rate": 4.462440087699609e-06, "loss": 0.7253, "step": 990 },
    { "epoch": 4.014177215189873, "grad_norm": 0.6579865184281549, "learning_rate": 4.426764539041861e-06, "loss": 0.729, "step": 991 },
    { "epoch": 4.018227848101266, "grad_norm": 1.242467236037362, "learning_rate": 4.391214415081582e-06, "loss": 0.7224, "step": 992 },
    { "epoch": 4.022278481012658, "grad_norm": 1.374220968163499, "learning_rate": 4.355790002134579e-06, "loss": 0.7133, "step": 993 },
    { "epoch": 4.02632911392405, "grad_norm": 0.9083786058044742, "learning_rate": 4.320491585504207e-06, "loss": 0.687, "step": 994 },
    { "epoch": 4.030379746835443, "grad_norm": 0.7665226288021144, "learning_rate": 4.2853194494790615e-06, "loss": 0.7111, "step": 995 },
    { "epoch": 4.034430379746835, "grad_norm": 0.856742534984717, "learning_rate": 4.250273877330691e-06, "loss": 0.7122, "step": 996 },
    { "epoch": 4.038481012658228, "grad_norm": 0.8446093533763239, "learning_rate": 4.215355151311313e-06, "loss": 0.7157, "step": 997 },
    { "epoch": 4.04253164556962, "grad_norm": 0.7094851506069519, "learning_rate": 4.180563552651542e-06, "loss": 0.7046, "step": 998 },
    { "epoch": 4.046582278481012, "grad_norm": 0.6268606686154433, "learning_rate": 4.145899361558147e-06, "loss": 0.7106, "step": 999 },
    { "epoch": 4.050632911392405, "grad_norm": 0.6228247669725995, "learning_rate": 4.111362857211738e-06, "loss": 0.7191, "step": 1000 },
    { "epoch": 4.054683544303797, "grad_norm": 0.7015812584466826, "learning_rate": 4.076954317764592e-06, "loss": 0.6865, "step": 1001 },
    { "epoch": 4.0587341772151895, "grad_norm": 0.7440031416535453, "learning_rate": 4.042674020338335e-06, "loss": 0.7307, "step": 1002 },
    { "epoch": 4.062784810126582, "grad_norm": 0.7114773976875085, "learning_rate": 4.0085222410217835e-06, "loss": 0.7175, "step": 1003 },
    { "epoch": 4.0668354430379745, "grad_norm": 0.5646695877084114, "learning_rate": 3.974499254868674e-06, "loss": 0.6985, "step": 1004 },
    { "epoch": 4.0708860759493675, "grad_norm": 0.5839802640498584, "learning_rate": 3.940605335895451e-06, "loss": 0.7109, "step": 1005 },
    { "epoch": 4.0749367088607595, "grad_norm": 0.5858288553551011, "learning_rate": 3.90684075707908e-06, "loss": 0.7088, "step": 1006 },
    { "epoch": 4.078987341772152, "grad_norm": 0.6339025783641914, "learning_rate": 3.8732057903548505e-06, "loss": 0.7034, "step": 1007 },
    { "epoch": 4.083037974683545, "grad_norm": 0.6140790256864238, "learning_rate": 3.8397007066141375e-06, "loss": 0.7163, "step": 1008 },
    { "epoch": 4.087088607594937, "grad_norm": 0.5302804531972476, "learning_rate": 3.806325775702304e-06, "loss": 0.6978, "step": 1009 },
    { "epoch": 4.091139240506329, "grad_norm": 0.5509354719553615, "learning_rate": 3.773081266416434e-06, "loss": 0.7225, "step": 1010 },
    { "epoch": 4.095189873417722, "grad_norm": 0.568709054740914, "learning_rate": 3.739967446503245e-06, "loss": 0.717, "step": 1011 },
    { "epoch": 4.099240506329114, "grad_norm": 0.5678990116502838, "learning_rate": 3.706984582656894e-06, "loss": 0.7111, "step": 1012 },
    { "epoch": 4.103291139240507, "grad_norm": 0.5766307451681424, "learning_rate": 3.6741329405168237e-06, "loss": 0.7017, "step": 1013 },
    { "epoch": 4.107341772151899, "grad_norm": 0.5198608496598639, "learning_rate": 3.641412784665648e-06, "loss": 0.6903, "step": 1014 },
    { "epoch": 4.111392405063291, "grad_norm": 0.5237210704007911, "learning_rate": 3.608824378627005e-06, "loss": 0.7144, "step": 1015 },
    { "epoch": 4.115443037974684, "grad_norm": 0.5359247596022375, "learning_rate": 3.5763679848634337e-06, "loss": 0.6991, "step": 1016 },
    { "epoch": 4.119493670886076, "grad_norm": 0.5670110358718077, "learning_rate": 3.544043864774269e-06, "loss": 0.7179, "step": 1017 },
    { "epoch": 4.123544303797468, "grad_norm": 0.5106317976449242, "learning_rate": 3.5118522786935282e-06, "loss": 0.6753, "step": 1018 },
    { "epoch": 4.127594936708861, "grad_norm": 0.5401787224612814, "learning_rate": 3.479793485887819e-06, "loss": 0.7288, "step": 1019 },
    { "epoch": 4.131645569620253, "grad_norm": 0.5245119467123522, "learning_rate": 3.4478677445542653e-06, "loss": 0.7068, "step": 1020 },
    { "epoch": 4.135696202531646, "grad_norm": 0.5142973450050567, "learning_rate": 3.4160753118183767e-06, "loss": 0.7094, "step": 1021 },
    { "epoch": 4.139746835443038, "grad_norm": 0.5131404690899716, "learning_rate": 3.3844164437320527e-06, "loss": 0.7076, "step": 1022 },
    { "epoch": 4.14379746835443, "grad_norm": 0.531942802173234, "learning_rate": 3.3528913952714558e-06, "loss": 0.7448, "step": 1023 },
    { "epoch": 4.147848101265823, "grad_norm": 0.5279130126767428, "learning_rate": 3.321500420335e-06, "loss": 0.7356, "step": 1024 },
    { "epoch": 4.151898734177215, "grad_norm": 0.5011034322426758, "learning_rate": 3.290243771741275e-06, "loss": 0.6951, "step": 1025 },
    { "epoch": 4.155949367088607, "grad_norm": 0.5026886527334059, "learning_rate": 3.2591217012270325e-06, "loss": 0.7134, "step": 1026 },
    { "epoch": 4.16, "grad_norm": 0.4913151025852923, "learning_rate": 3.228134459445149e-06, "loss": 0.7357, "step": 1027 },
    { "epoch": 4.164050632911392, "grad_norm": 0.5026351317757181, "learning_rate": 3.1972822959626205e-06, "loss": 0.7074, "step": 1028 },
    { "epoch": 4.168101265822785, "grad_norm": 0.48611841031088965, "learning_rate": 3.166565459258513e-06, "loss": 0.7066, "step": 1029 },
    { "epoch": 4.172151898734177, "grad_norm": 0.4937676282167012, "learning_rate": 3.1359841967220193e-06, "loss": 0.7286, "step": 1030 },
    { "epoch": 4.176202531645569, "grad_norm": 0.5036775199331502, "learning_rate": 3.105538754650419e-06, "loss": 0.7049, "step": 1031 },
    { "epoch": 4.180253164556962, "grad_norm": 0.49405263756675777, "learning_rate": 3.07522937824712e-06, "loss": 0.707, "step": 1032 },
    { "epoch": 4.184303797468354, "grad_norm": 0.5006939255026086, "learning_rate": 3.0450563116196697e-06, "loss": 0.7147, "step": 1033 },
    { "epoch": 4.188354430379746, "grad_norm": 0.4955527623578856, "learning_rate": 3.0150197977778008e-06, "loss": 0.737, "step": 1034 },
    { "epoch": 4.192405063291139, "grad_norm": 0.497062742431914, "learning_rate": 2.985120078631465e-06, "loss": 0.7176, "step": 1035 },
    { "epoch": 4.1964556962025314, "grad_norm": 0.4779180034777856, "learning_rate": 2.9553573949888893e-06, "loss": 0.6945, "step": 1036 },
    { "epoch": 4.200506329113924, "grad_norm": 0.47533393312681765, "learning_rate": 2.9257319865546384e-06, "loss": 0.7194, "step": 1037 },
    { "epoch": 4.2045569620253165, "grad_norm": 0.4974020993572035, "learning_rate": 2.896244091927678e-06, "loss": 0.7425, "step": 1038 },
    { "epoch": 4.2086075949367086, "grad_norm": 0.48921927697542045, "learning_rate": 2.8668939485994584e-06, "loss": 0.7071, "step": 1039 },
    { "epoch": 4.2126582278481015, "grad_norm": 0.49335771658976024, "learning_rate": 2.837681792951994e-06, "loss": 0.7048, "step": 1040 },
    { "epoch": 4.216708860759494, "grad_norm": 0.4895980642763978, "learning_rate": 2.808607860255981e-06, "loss": 0.7248, "step": 1041 },
    { "epoch": 4.220759493670886, "grad_norm": 0.48651937740697204, "learning_rate": 2.7796723846688634e-06, "loss": 0.7115, "step": 1042 },
    { "epoch": 4.224810126582279, "grad_norm": 0.49298515978144175, "learning_rate": 2.7508755992329937e-06, "loss": 0.729, "step": 1043 },
    { "epoch": 4.228860759493671, "grad_norm": 0.5032463421888747, "learning_rate": 2.722217735873718e-06, "loss": 0.6966, "step": 1044 },
    { "epoch": 4.232911392405064, "grad_norm": 0.48271388422857114, "learning_rate": 2.6936990253975315e-06, "loss": 0.7047, "step": 1045 },
    { "epoch": 4.236962025316456, "grad_norm": 0.4806323264074567, "learning_rate": 2.665319697490205e-06, "loss": 0.7367, "step": 1046 },
    { "epoch": 4.241012658227848, "grad_norm": 0.46960717467375185, "learning_rate": 2.637079980714945e-06, "loss": 0.7066, "step": 1047 },
    { "epoch": 4.245063291139241, "grad_norm": 0.48355217119956184, "learning_rate": 2.6089801025105453e-06, "loss": 0.7045, "step": 1048 },
    { "epoch": 4.249113924050633, "grad_norm": 0.4716432708881365, "learning_rate": 2.581020289189571e-06, "loss": 0.6977, "step": 1049 },
    { "epoch": 4.253164556962025, "grad_norm": 0.4725574809379581, "learning_rate": 2.553200765936501e-06, "loss": 0.7112, "step": 1050 },
    { "epoch": 4.257215189873418, "grad_norm": 0.4830757886917016, "learning_rate": 2.525521756805962e-06, "loss": 0.7068, "step": 1051 },
    { "epoch": 4.26126582278481, "grad_norm": 0.47597731087467043, "learning_rate": 2.497983484720885e-06, "loss": 0.7022, "step": 1052 },
    { "epoch": 4.265316455696203, "grad_norm": 0.49040803044903375, "learning_rate": 2.470586171470728e-06, "loss": 0.6878, "step": 1053 },
    { "epoch": 4.269367088607595, "grad_norm": 0.5076461544121226, "learning_rate": 2.4433300377096836e-06, "loss": 0.7212, "step": 1054 },
    { "epoch": 4.273417721518987, "grad_norm": 0.46926411286721886, "learning_rate": 2.4162153029549073e-06, "loss": 0.6904, "step": 1055 },
    { "epoch": 4.27746835443038, "grad_norm": 0.4733786178392552, "learning_rate": 2.3892421855847458e-06, "loss": 0.7361, "step": 1056 },
    { "epoch": 4.281518987341772, "grad_norm": 0.48162073110167114, "learning_rate": 2.362410902836978e-06, "loss": 0.7054, "step": 1057 },
    { "epoch": 4.285569620253165, "grad_norm": 0.47645181853574586, "learning_rate": 2.3357216708070653e-06, "loss": 0.7001, "step": 1058 },
    { "epoch": 4.289620253164557, "grad_norm": 0.4789362085427117, "learning_rate": 2.309174704446411e-06, "loss": 0.7117, "step": 1059 },
    { "epoch": 4.293670886075949, "grad_norm": 0.4739338115898973, "learning_rate": 2.2827702175606437e-06, "loss": 0.702, "step": 1060 },
    { "epoch": 4.297721518987342, "grad_norm": 0.4728264985543456, "learning_rate": 2.256508422807855e-06, "loss": 0.7066, "step": 1061 },
    { "epoch": 4.301772151898734, "grad_norm": 0.47542459012982796, "learning_rate": 2.230389531696946e-06, "loss": 0.6996, "step": 1062 },
    { "epoch": 4.305822784810126, "grad_norm": 0.4599096799650364, "learning_rate": 2.204413754585857e-06, "loss": 0.6847, "step": 1063 },
    { "epoch": 4.309873417721519, "grad_norm": 0.47548168392070383, "learning_rate": 2.1785813006799406e-06, "loss": 0.707, "step": 1064 },
    { "epoch": 4.313924050632911, "grad_norm": 0.48435088860204367, "learning_rate": 2.1528923780302224e-06, "loss": 0.7232, "step": 1065 },
    { "epoch": 4.317974683544303, "grad_norm": 0.49442286877589997, "learning_rate": 2.127347193531757e-06, "loss": 0.6955, "step": 1066 },
    { "epoch": 4.322025316455696, "grad_norm": 0.48550904933855227, "learning_rate": 2.101945952921942e-06, "loss": 0.7266, "step": 1067 },
    { "epoch": 4.326075949367088, "grad_norm": 0.4812781698591079, "learning_rate": 2.0766888607788906e-06, "loss": 0.7101, "step": 1068 },
    { "epoch": 4.330126582278481, "grad_norm": 0.46675464468913747, "learning_rate": 2.0515761205197337e-06,
|
"loss": 0.72, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 4.334177215189873, |
|
"grad_norm": 0.48611655178398033, |
|
"learning_rate": 2.0266079343990453e-06, |
|
"loss": 0.7009, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 4.3382278481012655, |
|
"grad_norm": 0.48927445323678875, |
|
"learning_rate": 2.0017845035071494e-06, |
|
"loss": 0.7024, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 4.3422784810126585, |
|
"grad_norm": 0.4735972033414075, |
|
"learning_rate": 1.9771060277685537e-06, |
|
"loss": 0.7059, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 4.3463291139240505, |
|
"grad_norm": 0.477159951185616, |
|
"learning_rate": 1.95257270594031e-06, |
|
"loss": 0.7227, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 4.3503797468354435, |
|
"grad_norm": 0.45951262445016283, |
|
"learning_rate": 1.9281847356104188e-06, |
|
"loss": 0.6971, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 4.3544303797468356, |
|
"grad_norm": 0.4748680227045038, |
|
"learning_rate": 1.9039423131962365e-06, |
|
"loss": 0.6988, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 4.358481012658228, |
|
"grad_norm": 0.45746116176155566, |
|
"learning_rate": 1.8798456339429027e-06, |
|
"loss": 0.6899, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 4.362531645569621, |
|
"grad_norm": 0.4645666415922422, |
|
"learning_rate": 1.8558948919217612e-06, |
|
"loss": 0.7002, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 4.366582278481013, |
|
"grad_norm": 0.4652083972993901, |
|
"learning_rate": 1.8320902800287954e-06, |
|
"loss": 0.7177, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 4.370632911392405, |
|
"grad_norm": 0.4770108084680748, |
|
"learning_rate": 1.8084319899830726e-06, |
|
"loss": 0.6891, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 4.374683544303798, |
|
"grad_norm": 0.48176768318279933, |
|
"learning_rate": 1.7849202123252097e-06, |
|
"loss": 0.7006, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 4.37873417721519, |
|
"grad_norm": 0.47212763192411145, |
|
"learning_rate": 1.7615551364158401e-06, |
|
"loss": 0.7096, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 4.382784810126582, |
|
"grad_norm": 0.4689129888733726, |
|
"learning_rate": 1.738336950434061e-06, |
|
"loss": 0.6986, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 4.386835443037975, |
|
"grad_norm": 0.4840616913988068, |
|
"learning_rate": 1.715265841375957e-06, |
|
"loss": 0.7237, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 4.390886075949367, |
|
"grad_norm": 0.47527393630851306, |
|
"learning_rate": 1.6923419950530684e-06, |
|
"loss": 0.7074, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 4.39493670886076, |
|
"grad_norm": 0.47714534374109047, |
|
"learning_rate": 1.6695655960909008e-06, |
|
"loss": 0.6895, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 4.398987341772152, |
|
"grad_norm": 0.48925435923932126, |
|
"learning_rate": 1.646936827927441e-06, |
|
"loss": 0.7065, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 4.403037974683544, |
|
"grad_norm": 0.45948962182272235, |
|
"learning_rate": 1.6244558728116766e-06, |
|
"loss": 0.691, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 4.407088607594937, |
|
"grad_norm": 0.4660688299187154, |
|
"learning_rate": 1.6021229118021265e-06, |
|
"loss": 0.6935, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 4.411139240506329, |
|
"grad_norm": 0.47675794206183325, |
|
"learning_rate": 1.5799381247653967e-06, |
|
"loss": 0.7134, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 4.415189873417722, |
|
"grad_norm": 0.47828233072302745, |
|
"learning_rate": 1.5579016903747013e-06, |
|
"loss": 0.7009, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 4.419240506329114, |
|
"grad_norm": 0.46162951423460646, |
|
"learning_rate": 1.5360137861084656e-06, |
|
"loss": 0.7021, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 4.423291139240506, |
|
"grad_norm": 0.48133978900255964, |
|
"learning_rate": 1.5142745882488475e-06, |
|
"loss": 0.693, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 4.427341772151899, |
|
"grad_norm": 0.49142779488745536, |
|
"learning_rate": 1.4926842718803691e-06, |
|
"loss": 0.7162, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 4.431392405063291, |
|
"grad_norm": 0.47246042591365944, |
|
"learning_rate": 1.4712430108884657e-06, |
|
"loss": 0.7105, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 4.435443037974683, |
|
"grad_norm": 0.4721199576885026, |
|
"learning_rate": 1.4499509779581078e-06, |
|
"loss": 0.7223, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 4.439493670886076, |
|
"grad_norm": 0.4722969873436003, |
|
"learning_rate": 1.4288083445723988e-06, |
|
"loss": 0.7193, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 4.443544303797468, |
|
"grad_norm": 0.46643506045552124, |
|
"learning_rate": 1.4078152810112045e-06, |
|
"loss": 0.7021, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 4.44759493670886, |
|
"grad_norm": 0.4696312108335307, |
|
"learning_rate": 1.3869719563497697e-06, |
|
"loss": 0.6973, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 4.451645569620253, |
|
"grad_norm": 0.46089568195877834, |
|
"learning_rate": 1.3662785384573663e-06, |
|
"loss": 0.7087, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 4.455696202531645, |
|
"grad_norm": 0.4678116872473261, |
|
"learning_rate": 1.3457351939959383e-06, |
|
"loss": 0.7229, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 4.459746835443038, |
|
"grad_norm": 0.45860861181236184, |
|
"learning_rate": 1.3253420884187551e-06, |
|
"loss": 0.7194, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 4.46379746835443, |
|
"grad_norm": 0.4686949121074428, |
|
"learning_rate": 1.3050993859690953e-06, |
|
"loss": 0.7132, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 4.467848101265822, |
|
"grad_norm": 0.47235307394573145, |
|
"learning_rate": 1.2850072496788869e-06, |
|
"loss": 0.6999, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 4.471898734177215, |
|
"grad_norm": 0.4658385082734779, |
|
"learning_rate": 1.2650658413674434e-06, |
|
"loss": 0.7309, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 4.4759493670886075, |
|
"grad_norm": 0.4698441371042859, |
|
"learning_rate": 1.2452753216401226e-06, |
|
"loss": 0.7091, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"grad_norm": 0.4723970253406769, |
|
"learning_rate": 1.2256358498870503e-06, |
|
"loss": 0.6987, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 4.4840506329113925, |
|
"grad_norm": 0.472892648043723, |
|
"learning_rate": 1.2061475842818337e-06, |
|
"loss": 0.7087, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 4.488101265822785, |
|
"grad_norm": 0.4650378124491558, |
|
"learning_rate": 1.1868106817802816e-06, |
|
"loss": 0.7061, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 4.4921518987341775, |
|
"grad_norm": 0.46548755841832035, |
|
"learning_rate": 1.1676252981191482e-06, |
|
"loss": 0.6844, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 4.49620253164557, |
|
"grad_norm": 0.46968843649622716, |
|
"learning_rate": 1.1485915878148823e-06, |
|
"loss": 0.7102, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 4.500253164556962, |
|
"grad_norm": 0.4618826252665963, |
|
"learning_rate": 1.1297097041623584e-06, |
|
"loss": 0.6856, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 4.504303797468355, |
|
"grad_norm": 0.47673085477356547, |
|
"learning_rate": 1.1109797992336847e-06, |
|
"loss": 0.7081, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 4.508354430379747, |
|
"grad_norm": 0.46503650852728906, |
|
"learning_rate": 1.092402023876933e-06, |
|
"loss": 0.718, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 4.512405063291139, |
|
"grad_norm": 0.4782096702364812, |
|
"learning_rate": 1.0739765277149527e-06, |
|
"loss": 0.7222, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 4.516455696202532, |
|
"grad_norm": 0.4751131078647094, |
|
"learning_rate": 1.0557034591441596e-06, |
|
"loss": 0.7348, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 4.520506329113924, |
|
"grad_norm": 0.4700023534908212, |
|
"learning_rate": 1.0375829653333324e-06, |
|
"loss": 0.7158, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 4.524556962025317, |
|
"grad_norm": 0.4582613269690092, |
|
"learning_rate": 1.0196151922224385e-06, |
|
"loss": 0.6901, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 4.528607594936709, |
|
"grad_norm": 0.4795307530459541, |
|
"learning_rate": 1.0018002845214526e-06, |
|
"loss": 0.7035, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 4.532658227848101, |
|
"grad_norm": 0.46917252196577597, |
|
"learning_rate": 9.841383857091947e-07, |
|
"loss": 0.7111, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 4.536708860759494, |
|
"grad_norm": 0.4768017142268406, |
|
"learning_rate": 9.666296380321616e-07, |
|
"loss": 0.7123, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 4.540759493670886, |
|
"grad_norm": 0.4602520702460931, |
|
"learning_rate": 9.492741825034124e-07, |
|
"loss": 0.6991, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 4.544810126582279, |
|
"grad_norm": 0.45491265014353127, |
|
"learning_rate": 9.320721589013892e-07, |
|
"loss": 0.7077, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 4.548860759493671, |
|
"grad_norm": 0.46805995251792165, |
|
"learning_rate": 9.150237057688339e-07, |
|
"loss": 0.6942, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 4.552911392405063, |
|
"grad_norm": 0.47102629639855165, |
|
"learning_rate": 8.981289604116328e-07, |
|
"loss": 0.712, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 4.556962025316456, |
|
"grad_norm": 0.4606921863716593, |
|
"learning_rate": 8.813880588977542e-07, |
|
"loss": 0.7005, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 4.561012658227848, |
|
"grad_norm": 0.47159463083269837, |
|
"learning_rate": 8.648011360561126e-07, |
|
"loss": 0.7125, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 4.56506329113924, |
|
"grad_norm": 0.46653697035758235, |
|
"learning_rate": 8.483683254755037e-07, |
|
"loss": 0.6809, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 4.569113924050633, |
|
"grad_norm": 0.45984003008181273, |
|
"learning_rate": 8.320897595035227e-07, |
|
"loss": 0.698, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 4.573164556962025, |
|
"grad_norm": 0.47508456329565657, |
|
"learning_rate": 8.159655692455093e-07, |
|
"loss": 0.7191, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 4.577215189873417, |
|
"grad_norm": 0.46884346133679355, |
|
"learning_rate": 7.999958845634648e-07, |
|
"loss": 0.7022, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 4.58126582278481, |
|
"grad_norm": 0.4741204135737283, |
|
"learning_rate": 7.841808340750478e-07, |
|
"loss": 0.6959, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 4.585316455696202, |
|
"grad_norm": 0.46159415365960194, |
|
"learning_rate": 7.685205451524869e-07, |
|
"loss": 0.7011, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 4.589367088607595, |
|
"grad_norm": 0.4647851577622568, |
|
"learning_rate": 7.530151439216027e-07, |
|
"loss": 0.7086, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 4.593417721518987, |
|
"grad_norm": 0.46909601422078095, |
|
"learning_rate": 7.376647552607675e-07, |
|
"loss": 0.7196, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 4.597468354430379, |
|
"grad_norm": 0.47378320439885285, |
|
"learning_rate": 7.224695027998963e-07, |
|
"loss": 0.7233, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 4.601518987341772, |
|
"grad_norm": 0.4670281449888087, |
|
"learning_rate": 7.07429508919466e-07, |
|
"loss": 0.6916, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 4.605569620253164, |
|
"grad_norm": 0.468118557929843, |
|
"learning_rate": 6.925448947495206e-07, |
|
"loss": 0.6979, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 4.609620253164557, |
|
"grad_norm": 0.4557014633707797, |
|
"learning_rate": 6.778157801686936e-07, |
|
"loss": 0.7017, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 4.613670886075949, |
|
"grad_norm": 0.47171535529063435, |
|
"learning_rate": 6.632422838032515e-07, |
|
"loss": 0.7083, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 4.6177215189873415, |
|
"grad_norm": 0.4710862827629821, |
|
"learning_rate": 6.488245230261281e-07, |
|
"loss": 0.7336, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 4.6217721518987345, |
|
"grad_norm": 0.47310489980271214, |
|
"learning_rate": 6.345626139559868e-07, |
|
"loss": 0.7248, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 4.6258227848101265, |
|
"grad_norm": 0.462440180107535, |
|
"learning_rate": 6.204566714562866e-07, |
|
"loss": 0.71, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 4.629873417721519, |
|
"grad_norm": 0.47650528083724153, |
|
"learning_rate": 6.06506809134344e-07, |
|
"loss": 0.7152, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 4.633924050632912, |
|
"grad_norm": 0.4663516529158096, |
|
"learning_rate": 5.927131393404373e-07, |
|
"loss": 0.7208, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 4.637974683544304, |
|
"grad_norm": 0.46923847987254025, |
|
"learning_rate": 5.790757731668817e-07, |
|
"loss": 0.7118, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 4.642025316455696, |
|
"grad_norm": 0.4656918173865396, |
|
"learning_rate": 5.655948204471507e-07, |
|
"loss": 0.6998, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 4.646075949367089, |
|
"grad_norm": 0.4542669393840388, |
|
"learning_rate": 5.522703897549875e-07, |
|
"loss": 0.7196, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 4.650126582278481, |
|
"grad_norm": 0.465328690030621, |
|
"learning_rate": 5.391025884035239e-07, |
|
"loss": 0.7186, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 4.654177215189874, |
|
"grad_norm": 0.4596199034954166, |
|
"learning_rate": 5.260915224444207e-07, |
|
"loss": 0.709, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 4.658227848101266, |
|
"grad_norm": 0.46205374841501895, |
|
"learning_rate": 5.132372966670129e-07, |
|
"loss": 0.7079, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 4.662278481012658, |
|
"grad_norm": 0.4597931242942473, |
|
"learning_rate": 5.005400145974704e-07, |
|
"loss": 0.7037, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 4.666329113924051, |
|
"grad_norm": 0.47431235279394923, |
|
"learning_rate": 4.879997784979562e-07, |
|
"loss": 0.7268, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 4.670379746835443, |
|
"grad_norm": 0.4622505740616268, |
|
"learning_rate": 4.7561668936580984e-07, |
|
"loss": 0.6881, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 4.674430379746836, |
|
"grad_norm": 0.46061954957770435, |
|
"learning_rate": 4.6339084693272306e-07, |
|
"loss": 0.7126, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 4.678481012658228, |
|
"grad_norm": 0.4630447338638172, |
|
"learning_rate": 4.5132234966395847e-07, |
|
"loss": 0.6973, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 4.68253164556962, |
|
"grad_norm": 0.4588114501106474, |
|
"learning_rate": 4.3941129475752795e-07, |
|
"loss": 0.7021, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 4.686582278481013, |
|
"grad_norm": 0.46019262827718294, |
|
"learning_rate": 4.27657778143431e-07, |
|
"loss": 0.7121, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 4.690632911392405, |
|
"grad_norm": 0.46305099118640536, |
|
"learning_rate": 4.1606189448287757e-07, |
|
"loss": 0.7073, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 4.694683544303797, |
|
"grad_norm": 0.4621325349031714, |
|
"learning_rate": 4.046237371675177e-07, |
|
"loss": 0.71, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 4.69873417721519, |
|
"grad_norm": 0.4658496649342986, |
|
"learning_rate": 3.9334339831869963e-07, |
|
"loss": 0.6886, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 4.702784810126582, |
|
"grad_norm": 0.46216732772858105, |
|
"learning_rate": 3.8222096878671955e-07, |
|
"loss": 0.6892, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 4.706835443037974, |
|
"grad_norm": 0.4554300388636575, |
|
"learning_rate": 3.7125653815009545e-07, |
|
"loss": 0.695, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 4.710886075949367, |
|
"grad_norm": 0.46674816950612363, |
|
"learning_rate": 3.6045019471484974e-07, |
|
"loss": 0.7045, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 4.714936708860759, |
|
"grad_norm": 0.4632920066521023, |
|
"learning_rate": 3.498020255137813e-07, |
|
"loss": 0.6981, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 4.718987341772152, |
|
"grad_norm": 0.5784518087058715, |
|
"learning_rate": 3.393121163057811e-07, |
|
"loss": 0.7119, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 4.723037974683544, |
|
"grad_norm": 0.46060896568713544, |
|
"learning_rate": 3.289805515751399e-07, |
|
"loss": 0.7026, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 4.727088607594936, |
|
"grad_norm": 0.4634375075621392, |
|
"learning_rate": 3.188074145308573e-07, |
|
"loss": 0.6847, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 4.731139240506329, |
|
"grad_norm": 0.46709061342816877, |
|
"learning_rate": 3.087927871059804e-07, |
|
"loss": 0.686, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 4.735189873417721, |
|
"grad_norm": 0.46592879749631144, |
|
"learning_rate": 2.989367499569418e-07, |
|
"loss": 0.7241, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 4.739240506329114, |
|
"grad_norm": 0.46392860128183516, |
|
"learning_rate": 2.8923938246290917e-07, |
|
"loss": 0.7128, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 4.743291139240506, |
|
"grad_norm": 0.46759426172925156, |
|
"learning_rate": 2.7970076272514804e-07, |
|
"loss": 0.7049, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 4.7473417721518985, |
|
"grad_norm": 0.46287866388708293, |
|
"learning_rate": 2.703209675663887e-07, |
|
"loss": 0.7065, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 4.751392405063291, |
|
"grad_norm": 0.46086104488780216, |
|
"learning_rate": 2.6110007253021374e-07, |
|
"loss": 0.7068, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 4.7554430379746835, |
|
"grad_norm": 0.4677580888780486, |
|
"learning_rate": 2.520381518804471e-07, |
|
"loss": 0.6714, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 4.759493670886076, |
|
"grad_norm": 0.4773580051540583, |
|
"learning_rate": 2.4313527860054585e-07, |
|
"loss": 0.7073, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 4.7635443037974685, |
|
"grad_norm": 0.4671885101235506, |
|
"learning_rate": 2.343915243930317e-07, |
|
"loss": 0.7346, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 4.767594936708861, |
|
"grad_norm": 0.4834070358252625, |
|
"learning_rate": 2.2580695967889367e-07, |
|
"loss": 0.7071, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 4.7716455696202535, |
|
"grad_norm": 0.4660824574416042, |
|
"learning_rate": 2.1738165359704189e-07, |
|
"loss": 0.6831, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 4.775696202531646, |
|
"grad_norm": 0.4799221334363283, |
|
"learning_rate": 2.0911567400373257e-07, |
|
"loss": 0.7131, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 4.779746835443038, |
|
"grad_norm": 0.4629456904000208, |
|
"learning_rate": 2.0100908747202607e-07, |
|
"loss": 0.6919, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 4.783797468354431, |
|
"grad_norm": 0.4680967917390402, |
|
"learning_rate": 1.9306195929125638e-07, |
|
"loss": 0.7074, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 4.787848101265823, |
|
"grad_norm": 0.46200443816457737, |
|
"learning_rate": 1.8527435346650247e-07, |
|
"loss": 0.7238, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 4.791898734177215, |
|
"grad_norm": 0.461111139652867, |
|
"learning_rate": 1.7764633271807108e-07, |
|
"loss": 0.7066, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 4.795949367088608, |
|
"grad_norm": 0.4606576337140547, |
|
"learning_rate": 1.7017795848099262e-07, |
|
"loss": 0.7028, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"grad_norm": 0.4571636066044727, |
|
"learning_rate": 1.6286929090452596e-07, |
|
"loss": 0.7149, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 4.804050632911393, |
|
"grad_norm": 0.4567010754856929, |
|
"learning_rate": 1.557203888516745e-07, |
|
"loss": 0.7149, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 4.808101265822785, |
|
"grad_norm": 0.4622601385178996, |
|
"learning_rate": 1.487313098987131e-07, |
|
"loss": 0.7175, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 4.812151898734177, |
|
"grad_norm": 0.4587546617654815, |
|
"learning_rate": 1.4190211033472402e-07, |
|
"loss": 0.7187, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 4.81620253164557, |
|
"grad_norm": 0.4667702556420012, |
|
"learning_rate": 1.3523284516113955e-07, |
|
"loss": 0.6966, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 4.820253164556962, |
|
"grad_norm": 0.4599501619745314, |
|
"learning_rate": 1.2872356809130682e-07, |
|
"loss": 0.7119, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 4.824303797468355, |
|
"grad_norm": 0.46612847518901457, |
|
"learning_rate": 1.2237433155004807e-07, |
|
"loss": 0.7172, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 4.828354430379747, |
|
"grad_norm": 0.4660602617446201, |
|
"learning_rate": 1.1618518667323886e-07, |
|
"loss": 0.7223, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 4.832405063291139, |
|
"grad_norm": 0.46509600894492625, |
|
"learning_rate": 1.1015618330740385e-07, |
|
"loss": 0.7254, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 4.836455696202532, |
|
"grad_norm": 0.4590671348062732, |
|
"learning_rate": 1.042873700093061e-07, |
|
"loss": 0.7212, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 4.840506329113924, |
|
"grad_norm": 0.4664343263030432, |
|
"learning_rate": 9.857879404556291e-08, |
|
"loss": 0.7155, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 4.844556962025316, |
|
"grad_norm": 0.4592604405574057, |
|
"learning_rate": 9.303050139225722e-08, |
|
"loss": 0.7137, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 4.848607594936709, |
|
"grad_norm": 0.4576097655122136, |
|
"learning_rate": 8.76425367345779e-08, |
|
"loss": 0.703, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 4.852658227848101, |
|
"grad_norm": 0.4659712553498694, |
|
"learning_rate": 8.241494346644897e-08, |
|
"loss": 0.6932, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 4.856708860759493, |
|
"grad_norm": 0.4627341817635609, |
|
"learning_rate": 7.734776369019204e-08, |
|
"loss": 0.7063, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 4.860759493670886, |
|
"grad_norm": 0.4672227961895586, |
|
"learning_rate": 7.244103821617332e-08, |
|
"loss": 0.7171, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 4.864810126582278, |
|
"grad_norm": 0.46056636100330667, |
|
"learning_rate": 6.769480656248606e-08, |
|
"loss": 0.7139, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 4.868860759493671, |
|
"grad_norm": 0.4594705702046612, |
|
"learning_rate": 6.310910695462635e-08, |
|
"loss": 0.7122, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 4.872911392405063, |
|
"grad_norm": 0.46344855757470726, |
|
"learning_rate": 5.8683976325191185e-08, |
|
"loss": 0.7212, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 4.876962025316455, |
|
"grad_norm": 0.4582634148107596, |
|
"learning_rate": 5.4419450313571984e-08, |
|
"loss": 0.7025, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 4.881012658227848, |
|
"grad_norm": 0.464827815867559, |
|
"learning_rate": 5.031556326567488e-08, |
|
"loss": 0.718, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 4.88506329113924, |
|
"grad_norm": 0.45907820109757347, |
|
"learning_rate": 4.637234823364312e-08, |
|
"loss": 0.7111, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 4.889113924050633, |
|
"grad_norm": 0.46204180024544145, |
|
"learning_rate": 4.258983697558838e-08, |
|
"loss": 0.6927, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 4.8931645569620255, |
|
"grad_norm": 0.4590570358367952, |
|
"learning_rate": 3.896805995533548e-08, |
|
"loss": 0.703, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 4.8972151898734175, |
|
"grad_norm": 0.4649979453970776, |
|
"learning_rate": 3.550704634218028e-08, |
|
"loss": 0.6942, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 4.9012658227848105, |
|
"grad_norm": 0.4585177500740186, |
|
"learning_rate": 3.2206824010647676e-08, |
|
"loss": 0.6991, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 4.905316455696203, |
|
"grad_norm": 0.45988955561723205, |
|
"learning_rate": 2.9067419540278476e-08, |
|
"loss": 0.6931, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 4.909367088607595, |
|
"grad_norm": 0.46160509375120545, |
|
"learning_rate": 2.6088858215400638e-08, |
|
"loss": 0.6957, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 4.913417721518988, |
|
"grad_norm": 0.45770905649703353, |
|
"learning_rate": 2.3271164024940564e-08, |
|
"loss": 0.689, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 4.91746835443038, |
|
"grad_norm": 0.4615418488797541, |
|
"learning_rate": 2.061435966221881e-08, |
|
"loss": 0.7012, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 4.921518987341772, |
|
"grad_norm": 0.46173248429569286, |
|
"learning_rate": 1.811846652477245e-08, |
|
"loss": 0.7246, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 4.925569620253165, |
|
"grad_norm": 0.4619400354151063, |
|
"learning_rate": 1.5783504714184106e-08, |
|
"loss": 0.7158, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 4.929620253164557, |
|
"grad_norm": 0.46386561139423577, |
|
"learning_rate": 1.360949303591097e-08, |
|
"loss": 0.7154, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 4.93367088607595, |
|
"grad_norm": 0.4611610621816007, |
|
"learning_rate": 1.1596448999144916e-08, |
|
"loss": 0.7137, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 4.937721518987342, |
|
"grad_norm": 0.45992161464926373, |
|
"learning_rate": 9.744388816668172e-09, |
|
"loss": 0.7095, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 4.941772151898734, |
|
"grad_norm": 0.45308759591341885, |
|
"learning_rate": 8.05332740472009e-09, |
|
"loss": 0.7081, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 4.945822784810127, |
|
"grad_norm": 0.6123635344569365, |
|
"learning_rate": 6.523278382872811e-09, |
|
"loss": 0.7113, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 4.949873417721519, |
|
"grad_norm": 0.47072404686160174, |
|
"learning_rate": 5.15425407393133e-09, |
|
"loss": 0.6879, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 4.953924050632912, |
|
"grad_norm": 0.46638974962549407, |
|
"learning_rate": 3.94626550383137e-09, |
|
"loss": 0.7078, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 4.957974683544304, |
|
"grad_norm": 0.46140620537082006, |
|
"learning_rate": 2.899322401546112e-09, |
|
"loss": 0.7028, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 4.962025316455696, |
|
"grad_norm": 0.46919808962795967, |
|
"learning_rate": 2.013433199010706e-09, |
|
"loss": 0.6982, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 4.966075949367089, |
|
"grad_norm": 0.46170609712707994, |
|
"learning_rate": 1.2886050310556563e-09, |
|
"loss": 0.7185, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 4.970126582278481, |
|
"grad_norm": 0.45836213476764615, |
|
"learning_rate": 7.248437353468695e-10, |
|
"loss": 0.6838, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 4.974177215189873, |
|
"grad_norm": 0.46897800618641133, |
|
"learning_rate": 3.221538523412449e-10, |
|
"loss": 0.7145, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 4.978227848101266, |
|
"grad_norm": 0.455994247804533, |
|
"learning_rate": 8.053862524670663e-11, |
|
"loss": 0.6984, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 4.982278481012658, |
|
"grad_norm": 0.4603004127990118, |
|
"learning_rate": 0.0, |
|
"loss": 0.7094, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 4.982278481012658, |
|
"step": 1230, |
|
"total_flos": 2.1316152429779026e+18, |
|
"train_loss": 1.01131284905643, |
|
"train_runtime": 15678.0836, |
|
"train_samples_per_second": 10.078, |
|
"train_steps_per_second": 0.078 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1230, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 5, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2.1316152429779026e+18, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |