{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.988354430379747,
  "eval_steps": 500,
  "global_step": 1230,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004050632911392405,
      "grad_norm": 9.318807356487937,
      "learning_rate": 3.2520325203252037e-07,
      "loss": 0.9656,
      "step": 1
    },
    {
      "epoch": 0.00810126582278481,
      "grad_norm": 9.218275264197594,
      "learning_rate": 6.504065040650407e-07,
      "loss": 0.9308,
      "step": 2
    },
    {
      "epoch": 0.012151898734177215,
      "grad_norm": 9.421891560829028,
      "learning_rate": 9.75609756097561e-07,
      "loss": 0.9957,
      "step": 3
    },
    {
      "epoch": 0.01620253164556962,
      "grad_norm": 9.365453877677156,
      "learning_rate": 1.3008130081300815e-06,
      "loss": 0.9567,
      "step": 4
    },
    {
      "epoch": 0.020253164556962026,
      "grad_norm": 9.208143949182029,
      "learning_rate": 1.6260162601626018e-06,
      "loss": 0.958,
      "step": 5
    },
    {
      "epoch": 0.02430379746835443,
      "grad_norm": 8.495033190889078,
      "learning_rate": 1.951219512195122e-06,
      "loss": 0.9323,
      "step": 6
    },
    {
      "epoch": 0.028354430379746835,
      "grad_norm": 6.6181129010629505,
      "learning_rate": 2.2764227642276426e-06,
      "loss": 0.8509,
      "step": 7
    },
    {
      "epoch": 0.03240506329113924,
      "grad_norm": 6.475784613297617,
      "learning_rate": 2.601626016260163e-06,
      "loss": 0.8414,
      "step": 8
    },
    {
      "epoch": 0.03645569620253165,
      "grad_norm": 4.066084299671373,
      "learning_rate": 2.926829268292683e-06,
      "loss": 0.7901,
      "step": 9
    },
    {
      "epoch": 0.04050632911392405,
      "grad_norm": 3.3728646622988006,
      "learning_rate": 3.2520325203252037e-06,
      "loss": 0.766,
      "step": 10
    },
    {
      "epoch": 0.044556962025316456,
      "grad_norm": 3.1062159968625567,
      "learning_rate": 3.577235772357724e-06,
      "loss": 0.734,
      "step": 11
    },
    {
      "epoch": 0.04860759493670886,
      "grad_norm": 3.9578468675513263,
      "learning_rate": 3.902439024390244e-06,
      "loss": 0.7277,
      "step": 12
    },
    {
      "epoch": 0.052658227848101265,
      "grad_norm": 3.900301399470025,
      "learning_rate": 4.227642276422765e-06,
      "loss": 0.715,
      "step": 13
    },
    {
      "epoch": 0.05670886075949367,
      "grad_norm": 3.5460739718887906,
      "learning_rate": 4.552845528455285e-06,
      "loss": 0.6798,
      "step": 14
    },
    {
      "epoch": 0.060759493670886074,
      "grad_norm": 2.992589772819424,
      "learning_rate": 4.8780487804878055e-06,
      "loss": 0.6616,
      "step": 15
    },
    {
      "epoch": 0.06481012658227848,
      "grad_norm": 2.7963421775038113,
      "learning_rate": 5.203252032520326e-06,
      "loss": 0.6049,
      "step": 16
    },
    {
      "epoch": 0.06886075949367089,
      "grad_norm": 2.7532587010945844,
      "learning_rate": 5.528455284552846e-06,
      "loss": 0.5933,
      "step": 17
    },
    {
      "epoch": 0.0729113924050633,
      "grad_norm": 2.4574082270714133,
      "learning_rate": 5.853658536585366e-06,
      "loss": 0.5831,
      "step": 18
    },
    {
      "epoch": 0.0769620253164557,
      "grad_norm": 1.877193827342358,
      "learning_rate": 6.178861788617887e-06,
      "loss": 0.5785,
      "step": 19
    },
    {
      "epoch": 0.0810126582278481,
      "grad_norm": 1.4876994275404645,
      "learning_rate": 6.504065040650407e-06,
      "loss": 0.5678,
      "step": 20
    },
    {
      "epoch": 0.08506329113924051,
      "grad_norm": 1.54854115582571,
      "learning_rate": 6.829268292682928e-06,
      "loss": 0.5466,
      "step": 21
    },
    {
      "epoch": 0.08911392405063291,
      "grad_norm": 1.857961385185687,
      "learning_rate": 7.154471544715448e-06,
      "loss": 0.5387,
      "step": 22
    },
    {
      "epoch": 0.09316455696202532,
      "grad_norm": 1.6497304021704264,
      "learning_rate": 7.4796747967479676e-06,
      "loss": 0.5349,
      "step": 23
    },
    {
      "epoch": 0.09721518987341772,
      "grad_norm": 1.1456758016283615,
      "learning_rate": 7.804878048780489e-06,
      "loss": 0.4984,
      "step": 24
    },
    {
      "epoch": 0.10126582278481013,
      "grad_norm": 1.0740616861475445,
      "learning_rate": 8.130081300813009e-06,
      "loss": 0.4905,
      "step": 25
    },
    {
      "epoch": 0.10531645569620253,
      "grad_norm": 1.0466532273693914,
      "learning_rate": 8.45528455284553e-06,
      "loss": 0.5018,
      "step": 26
    },
    {
      "epoch": 0.10936708860759493,
      "grad_norm": 1.02667955906661,
      "learning_rate": 8.78048780487805e-06,
      "loss": 0.5011,
      "step": 27
    },
    {
      "epoch": 0.11341772151898734,
      "grad_norm": 0.9776011138828424,
      "learning_rate": 9.10569105691057e-06,
      "loss": 0.4855,
      "step": 28
    },
    {
      "epoch": 0.11746835443037974,
      "grad_norm": 0.8201720765149819,
      "learning_rate": 9.43089430894309e-06,
      "loss": 0.4846,
      "step": 29
    },
    {
      "epoch": 0.12151898734177215,
      "grad_norm": 0.8097192121504712,
      "learning_rate": 9.756097560975611e-06,
      "loss": 0.472,
      "step": 30
    },
    {
      "epoch": 0.12556962025316457,
      "grad_norm": 0.7937495823532817,
      "learning_rate": 1.008130081300813e-05,
      "loss": 0.4847,
      "step": 31
    },
    {
      "epoch": 0.12962025316455697,
      "grad_norm": 0.6887506840396479,
      "learning_rate": 1.0406504065040652e-05,
      "loss": 0.4649,
      "step": 32
    },
    {
      "epoch": 0.13367088607594937,
      "grad_norm": 0.7829798206485311,
      "learning_rate": 1.0731707317073172e-05,
      "loss": 0.4708,
      "step": 33
    },
    {
      "epoch": 0.13772151898734178,
      "grad_norm": 0.7234986215372537,
      "learning_rate": 1.1056910569105692e-05,
      "loss": 0.4944,
      "step": 34
    },
    {
      "epoch": 0.14177215189873418,
      "grad_norm": 0.6878847915973635,
      "learning_rate": 1.1382113821138213e-05,
      "loss": 0.4386,
      "step": 35
    },
    {
      "epoch": 0.1458227848101266,
      "grad_norm": 0.6914667668493432,
      "learning_rate": 1.1707317073170731e-05,
      "loss": 0.4362,
      "step": 36
    },
    {
      "epoch": 0.149873417721519,
      "grad_norm": 0.7719366637921463,
      "learning_rate": 1.2032520325203254e-05,
      "loss": 0.4438,
      "step": 37
    },
    {
      "epoch": 0.1539240506329114,
      "grad_norm": 0.7819402304189363,
      "learning_rate": 1.2357723577235774e-05,
      "loss": 0.459,
      "step": 38
    },
    {
      "epoch": 0.1579746835443038,
      "grad_norm": 0.7011440822451196,
      "learning_rate": 1.2682926829268294e-05,
      "loss": 0.4449,
      "step": 39
    },
    {
      "epoch": 0.1620253164556962,
      "grad_norm": 0.8902827791944216,
      "learning_rate": 1.3008130081300815e-05,
      "loss": 0.4605,
      "step": 40
    },
    {
      "epoch": 0.1660759493670886,
      "grad_norm": 0.6148554261564595,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.4364,
      "step": 41
    },
    {
      "epoch": 0.17012658227848101,
      "grad_norm": 0.7158634479026297,
      "learning_rate": 1.3658536585365855e-05,
      "loss": 0.448,
      "step": 42
    },
    {
      "epoch": 0.17417721518987342,
      "grad_norm": 0.6554077570372727,
      "learning_rate": 1.3983739837398376e-05,
      "loss": 0.4472,
      "step": 43
    },
    {
      "epoch": 0.17822784810126582,
      "grad_norm": 0.7544605903574897,
      "learning_rate": 1.4308943089430896e-05,
      "loss": 0.4495,
      "step": 44
    },
    {
      "epoch": 0.18227848101265823,
      "grad_norm": 0.7735838482073109,
      "learning_rate": 1.4634146341463415e-05,
      "loss": 0.4395,
      "step": 45
    },
    {
      "epoch": 0.18632911392405063,
      "grad_norm": 0.6870970939034028,
      "learning_rate": 1.4959349593495935e-05,
      "loss": 0.4556,
      "step": 46
    },
    {
      "epoch": 0.19037974683544304,
      "grad_norm": 0.8193928272709203,
      "learning_rate": 1.528455284552846e-05,
      "loss": 0.4364,
      "step": 47
    },
    {
      "epoch": 0.19443037974683544,
      "grad_norm": 0.6972735418404649,
      "learning_rate": 1.5609756097560978e-05,
      "loss": 0.4448,
      "step": 48
    },
    {
      "epoch": 0.19848101265822785,
      "grad_norm": 0.7047744255338407,
      "learning_rate": 1.5934959349593496e-05,
      "loss": 0.4435,
      "step": 49
    },
    {
      "epoch": 0.20253164556962025,
      "grad_norm": 0.8067647848067095,
      "learning_rate": 1.6260162601626018e-05,
      "loss": 0.412,
      "step": 50
    },
    {
      "epoch": 0.20658227848101265,
      "grad_norm": 0.7545001794560968,
      "learning_rate": 1.6585365853658537e-05,
      "loss": 0.4181,
      "step": 51
    },
    {
      "epoch": 0.21063291139240506,
      "grad_norm": 0.8278611417400091,
      "learning_rate": 1.691056910569106e-05,
      "loss": 0.4302,
      "step": 52
    },
    {
      "epoch": 0.21468354430379746,
      "grad_norm": 0.6813320439199824,
      "learning_rate": 1.7235772357723578e-05,
      "loss": 0.4236,
      "step": 53
    },
    {
      "epoch": 0.21873417721518987,
      "grad_norm": 0.8649241717956722,
      "learning_rate": 1.75609756097561e-05,
      "loss": 0.4258,
      "step": 54
    },
    {
      "epoch": 0.22278481012658227,
      "grad_norm": 0.6823450913135959,
      "learning_rate": 1.788617886178862e-05,
      "loss": 0.4301,
      "step": 55
    },
    {
      "epoch": 0.22683544303797468,
      "grad_norm": 0.9637679266976064,
      "learning_rate": 1.821138211382114e-05,
      "loss": 0.4339,
      "step": 56
    },
    {
      "epoch": 0.23088607594936708,
      "grad_norm": 0.84579275640571,
      "learning_rate": 1.8536585365853663e-05,
      "loss": 0.4371,
      "step": 57
    },
    {
      "epoch": 0.23493670886075949,
      "grad_norm": 0.9172398847093063,
      "learning_rate": 1.886178861788618e-05,
      "loss": 0.4256,
      "step": 58
    },
    {
      "epoch": 0.2389873417721519,
      "grad_norm": 1.0344939870328795,
      "learning_rate": 1.91869918699187e-05,
      "loss": 0.4203,
      "step": 59
    },
    {
      "epoch": 0.2430379746835443,
      "grad_norm": 0.8385770685825575,
      "learning_rate": 1.9512195121951222e-05,
      "loss": 0.4196,
      "step": 60
    },
    {
      "epoch": 0.2470886075949367,
      "grad_norm": 0.8104089082714245,
      "learning_rate": 1.983739837398374e-05,
      "loss": 0.4193,
      "step": 61
    },
    {
      "epoch": 0.25113924050632913,
      "grad_norm": 0.8695776194161264,
      "learning_rate": 2.016260162601626e-05,
      "loss": 0.448,
      "step": 62
    },
    {
      "epoch": 0.25518987341772154,
      "grad_norm": 0.8886872222749782,
      "learning_rate": 2.048780487804878e-05,
      "loss": 0.4279,
      "step": 63
    },
    {
      "epoch": 0.25924050632911394,
      "grad_norm": 0.75361128240426,
      "learning_rate": 2.0813008130081303e-05,
      "loss": 0.4211,
      "step": 64
    },
    {
      "epoch": 0.26329113924050634,
      "grad_norm": 0.7928701594707984,
      "learning_rate": 2.1138211382113822e-05,
      "loss": 0.415,
      "step": 65
    },
    {
      "epoch": 0.26734177215189875,
      "grad_norm": 0.7463337197002735,
      "learning_rate": 2.1463414634146344e-05,
      "loss": 0.4109,
      "step": 66
    },
    {
      "epoch": 0.27139240506329115,
      "grad_norm": 0.8469273073259479,
      "learning_rate": 2.1788617886178863e-05,
      "loss": 0.4126,
      "step": 67
    },
    {
      "epoch": 0.27544303797468356,
      "grad_norm": 0.779993369066007,
      "learning_rate": 2.2113821138211385e-05,
      "loss": 0.4171,
      "step": 68
    },
    {
      "epoch": 0.27949367088607596,
      "grad_norm": 0.7460317930287893,
      "learning_rate": 2.2439024390243907e-05,
      "loss": 0.4269,
      "step": 69
    },
    {
      "epoch": 0.28354430379746837,
      "grad_norm": 0.9342576732900987,
      "learning_rate": 2.2764227642276426e-05,
      "loss": 0.4143,
      "step": 70
    },
    {
      "epoch": 0.28759493670886077,
      "grad_norm": 0.8407617816338501,
      "learning_rate": 2.3089430894308948e-05,
      "loss": 0.4052,
      "step": 71
    },
    {
      "epoch": 0.2916455696202532,
      "grad_norm": 1.050918247951831,
      "learning_rate": 2.3414634146341463e-05,
      "loss": 0.4217,
      "step": 72
    },
    {
      "epoch": 0.2956962025316456,
      "grad_norm": 0.6867608858139824,
      "learning_rate": 2.3739837398373985e-05,
      "loss": 0.4221,
      "step": 73
    },
    {
      "epoch": 0.299746835443038,
      "grad_norm": 0.9984194297643659,
      "learning_rate": 2.4065040650406507e-05,
      "loss": 0.4199,
      "step": 74
    },
    {
      "epoch": 0.3037974683544304,
      "grad_norm": 0.6956494731029399,
      "learning_rate": 2.4390243902439026e-05,
      "loss": 0.4157,
      "step": 75
    },
    {
      "epoch": 0.3078481012658228,
      "grad_norm": 0.8043534166805422,
      "learning_rate": 2.4715447154471548e-05,
      "loss": 0.4116,
      "step": 76
    },
    {
      "epoch": 0.3118987341772152,
      "grad_norm": 0.6241600022911961,
      "learning_rate": 2.5040650406504066e-05,
      "loss": 0.4103,
      "step": 77
    },
    {
      "epoch": 0.3159493670886076,
      "grad_norm": 0.8329838653144952,
      "learning_rate": 2.536585365853659e-05,
      "loss": 0.436,
      "step": 78
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.6961573610283888,
      "learning_rate": 2.569105691056911e-05,
      "loss": 0.4069,
      "step": 79
    },
    {
      "epoch": 0.3240506329113924,
      "grad_norm": 0.6396086433948602,
      "learning_rate": 2.601626016260163e-05,
      "loss": 0.4022,
      "step": 80
    },
    {
      "epoch": 0.3281012658227848,
      "grad_norm": 0.7947523581793382,
      "learning_rate": 2.634146341463415e-05,
      "loss": 0.4163,
      "step": 81
    },
    {
      "epoch": 0.3321518987341772,
      "grad_norm": 0.7202728275057191,
      "learning_rate": 2.6666666666666667e-05,
      "loss": 0.403,
      "step": 82
    },
    {
      "epoch": 0.3362025316455696,
      "grad_norm": 0.6086428506489437,
      "learning_rate": 2.699186991869919e-05,
      "loss": 0.411,
      "step": 83
    },
    {
      "epoch": 0.34025316455696203,
      "grad_norm": 0.9564247917220637,
      "learning_rate": 2.731707317073171e-05,
      "loss": 0.4102,
      "step": 84
    },
    {
      "epoch": 0.34430379746835443,
      "grad_norm": 0.9911378966264747,
      "learning_rate": 2.764227642276423e-05,
      "loss": 0.4161,
      "step": 85
    },
    {
      "epoch": 0.34835443037974684,
      "grad_norm": 0.9289658047692708,
      "learning_rate": 2.796747967479675e-05,
      "loss": 0.4084,
      "step": 86
    },
    {
      "epoch": 0.35240506329113924,
      "grad_norm": 0.7673299163248022,
      "learning_rate": 2.829268292682927e-05,
      "loss": 0.404,
      "step": 87
    },
    {
      "epoch": 0.35645569620253165,
      "grad_norm": 0.9861243499895322,
      "learning_rate": 2.8617886178861792e-05,
      "loss": 0.4021,
      "step": 88
    },
    {
      "epoch": 0.36050632911392405,
      "grad_norm": 1.142593460084511,
      "learning_rate": 2.8943089430894314e-05,
      "loss": 0.4072,
      "step": 89
    },
    {
      "epoch": 0.36455696202531646,
      "grad_norm": 0.7911982126033097,
      "learning_rate": 2.926829268292683e-05,
      "loss": 0.4076,
      "step": 90
    },
    {
      "epoch": 0.36860759493670886,
      "grad_norm": 0.7540600539853817,
      "learning_rate": 2.959349593495935e-05,
      "loss": 0.408,
      "step": 91
    },
    {
      "epoch": 0.37265822784810126,
      "grad_norm": 0.9190497695466016,
      "learning_rate": 2.991869918699187e-05,
      "loss": 0.4231,
      "step": 92
    },
    {
      "epoch": 0.37670886075949367,
      "grad_norm": 0.8110807528312284,
      "learning_rate": 3.0243902439024392e-05,
      "loss": 0.3928,
      "step": 93
    },
    {
      "epoch": 0.3807594936708861,
      "grad_norm": 0.9217985479822483,
      "learning_rate": 3.056910569105692e-05,
      "loss": 0.409,
      "step": 94
    },
    {
      "epoch": 0.3848101265822785,
      "grad_norm": 0.8625824189142716,
      "learning_rate": 3.089430894308943e-05,
      "loss": 0.4116,
      "step": 95
    },
    {
      "epoch": 0.3888607594936709,
      "grad_norm": 0.8059790650581811,
      "learning_rate": 3.1219512195121955e-05,
      "loss": 0.4181,
      "step": 96
    },
    {
      "epoch": 0.3929113924050633,
      "grad_norm": 0.8628809905170578,
      "learning_rate": 3.154471544715447e-05,
      "loss": 0.4231,
      "step": 97
    },
    {
      "epoch": 0.3969620253164557,
      "grad_norm": 0.7797115893285762,
      "learning_rate": 3.186991869918699e-05,
      "loss": 0.4039,
      "step": 98
    },
    {
      "epoch": 0.4010126582278481,
      "grad_norm": 0.788051818426161,
      "learning_rate": 3.2195121951219514e-05,
      "loss": 0.3964,
      "step": 99
    },
    {
      "epoch": 0.4050632911392405,
      "grad_norm": 0.7932689573264317,
      "learning_rate": 3.2520325203252037e-05,
      "loss": 0.3999,
      "step": 100
    },
    {
      "epoch": 0.4091139240506329,
      "grad_norm": 0.6838297862297614,
      "learning_rate": 3.284552845528456e-05,
      "loss": 0.4143,
      "step": 101
    },
    {
      "epoch": 0.4131645569620253,
      "grad_norm": 0.7573159452261299,
      "learning_rate": 3.3170731707317074e-05,
      "loss": 0.4079,
      "step": 102
    },
    {
      "epoch": 0.4172151898734177,
      "grad_norm": 0.578588648735455,
      "learning_rate": 3.3495934959349596e-05,
      "loss": 0.3969,
      "step": 103
    },
    {
      "epoch": 0.4212658227848101,
      "grad_norm": 0.6389926477736334,
      "learning_rate": 3.382113821138212e-05,
      "loss": 0.3997,
      "step": 104
    },
    {
      "epoch": 0.4253164556962025,
      "grad_norm": 0.5998679542739453,
      "learning_rate": 3.414634146341463e-05,
      "loss": 0.4117,
      "step": 105
    },
    {
      "epoch": 0.4293670886075949,
      "grad_norm": 0.646206759696015,
      "learning_rate": 3.4471544715447155e-05,
      "loss": 0.4172,
      "step": 106
    },
    {
      "epoch": 0.43341772151898733,
      "grad_norm": 0.7292411533543358,
      "learning_rate": 3.479674796747968e-05,
      "loss": 0.4085,
      "step": 107
    },
    {
      "epoch": 0.43746835443037974,
      "grad_norm": 0.7127360108772903,
      "learning_rate": 3.51219512195122e-05,
      "loss": 0.3967,
      "step": 108
    },
    {
      "epoch": 0.44151898734177214,
      "grad_norm": 0.7259831230072336,
      "learning_rate": 3.544715447154472e-05,
      "loss": 0.4099,
      "step": 109
    },
    {
      "epoch": 0.44556962025316454,
      "grad_norm": 0.6906024336059372,
      "learning_rate": 3.577235772357724e-05,
      "loss": 0.4007,
      "step": 110
    },
    {
      "epoch": 0.44962025316455695,
      "grad_norm": 0.6459505059471755,
      "learning_rate": 3.609756097560976e-05,
      "loss": 0.4272,
      "step": 111
    },
    {
      "epoch": 0.45367088607594935,
      "grad_norm": 0.7704177964487694,
      "learning_rate": 3.642276422764228e-05,
      "loss": 0.4006,
      "step": 112
    },
    {
      "epoch": 0.45772151898734176,
      "grad_norm": 0.7214536768236992,
      "learning_rate": 3.67479674796748e-05,
      "loss": 0.4238,
      "step": 113
    },
    {
      "epoch": 0.46177215189873416,
      "grad_norm": 0.697549486038351,
      "learning_rate": 3.7073170731707325e-05,
      "loss": 0.4181,
      "step": 114
    },
    {
      "epoch": 0.46582278481012657,
      "grad_norm": 0.8668149648173239,
      "learning_rate": 3.739837398373984e-05,
      "loss": 0.4099,
      "step": 115
    },
    {
      "epoch": 0.46987341772151897,
      "grad_norm": 0.5639812990887104,
      "learning_rate": 3.772357723577236e-05,
      "loss": 0.3922,
      "step": 116
    },
    {
      "epoch": 0.4739240506329114,
      "grad_norm": 0.6669312830758315,
      "learning_rate": 3.804878048780488e-05,
      "loss": 0.39,
      "step": 117
    },
    {
      "epoch": 0.4779746835443038,
      "grad_norm": 0.7565876525779722,
      "learning_rate": 3.83739837398374e-05,
      "loss": 0.4104,
      "step": 118
    },
    {
      "epoch": 0.4820253164556962,
      "grad_norm": 0.6969728187119064,
      "learning_rate": 3.869918699186992e-05,
      "loss": 0.407,
      "step": 119
    },
    {
      "epoch": 0.4860759493670886,
      "grad_norm": 0.8706879234061329,
      "learning_rate": 3.9024390243902444e-05,
      "loss": 0.4112,
      "step": 120
    },
    {
      "epoch": 0.490126582278481,
      "grad_norm": 0.7174404291292026,
      "learning_rate": 3.9349593495934966e-05,
      "loss": 0.383,
      "step": 121
    },
    {
      "epoch": 0.4941772151898734,
      "grad_norm": 0.5924379753893186,
      "learning_rate": 3.967479674796748e-05,
      "loss": 0.4222,
      "step": 122
    },
    {
      "epoch": 0.4982278481012658,
      "grad_norm": 0.7656367792606839,
      "learning_rate": 4e-05,
      "loss": 0.4016,
      "step": 123
    },
    {
      "epoch": 0.5022784810126583,
      "grad_norm": 0.6818924685759373,
      "learning_rate": 3.999991946137476e-05,
      "loss": 0.4161,
      "step": 124
    },
    {
      "epoch": 0.5063291139240507,
      "grad_norm": 0.6944208255145637,
      "learning_rate": 3.999967784614766e-05,
      "loss": 0.3965,
      "step": 125
    },
    {
      "epoch": 0.5103797468354431,
      "grad_norm": 0.6619032928529758,
      "learning_rate": 3.9999275156264656e-05,
      "loss": 0.4207,
      "step": 126
    },
    {
      "epoch": 0.5144303797468355,
      "grad_norm": 0.7440017619995255,
      "learning_rate": 3.999871139496895e-05,
      "loss": 0.3988,
      "step": 127
    },
    {
      "epoch": 0.5184810126582279,
      "grad_norm": 0.7006634940207473,
      "learning_rate": 3.9997986566800995e-05,
      "loss": 0.4011,
      "step": 128
    },
    {
      "epoch": 0.5225316455696203,
      "grad_norm": 0.659740156966535,
      "learning_rate": 3.999710067759846e-05,
      "loss": 0.4262,
      "step": 129
    },
    {
      "epoch": 0.5265822784810127,
      "grad_norm": 0.6660826052181477,
      "learning_rate": 3.999605373449617e-05,
      "loss": 0.3759,
      "step": 130
    },
    {
      "epoch": 0.5306329113924051,
      "grad_norm": 0.6586376071923136,
      "learning_rate": 3.9994845745926075e-05,
      "loss": 0.3903,
      "step": 131
    },
    {
      "epoch": 0.5346835443037975,
      "grad_norm": 0.7736870042683938,
      "learning_rate": 3.999347672161713e-05,
      "loss": 0.4064,
      "step": 132
    },
    {
      "epoch": 0.5387341772151899,
      "grad_norm": 0.7184471068674759,
      "learning_rate": 3.999194667259528e-05,
      "loss": 0.431,
      "step": 133
    },
    {
      "epoch": 0.5427848101265823,
      "grad_norm": 0.6799058288922066,
      "learning_rate": 3.999025561118334e-05,
      "loss": 0.4092,
      "step": 134
    },
    {
      "epoch": 0.5468354430379747,
      "grad_norm": 0.885061652663757,
      "learning_rate": 3.998840355100086e-05,
      "loss": 0.4129,
      "step": 135
    },
    {
      "epoch": 0.5508860759493671,
      "grad_norm": 0.6115596154431552,
      "learning_rate": 3.998639050696409e-05,
      "loss": 0.3874,
      "step": 136
    },
    {
      "epoch": 0.5549367088607595,
      "grad_norm": 0.6403307832931707,
      "learning_rate": 3.998421649528582e-05,
      "loss": 0.4228,
      "step": 137
    },
    {
      "epoch": 0.5589873417721519,
      "grad_norm": 0.680755508804493,
      "learning_rate": 3.9981881533475234e-05,
      "loss": 0.3919,
      "step": 138
    },
    {
      "epoch": 0.5630379746835443,
      "grad_norm": 0.5694142233057562,
      "learning_rate": 3.997938564033779e-05,
      "loss": 0.3913,
      "step": 139
    },
    {
      "epoch": 0.5670886075949367,
      "grad_norm": 0.74996486232429,
      "learning_rate": 3.9976728835975064e-05,
      "loss": 0.4133,
      "step": 140
    },
    {
      "epoch": 0.5711392405063291,
      "grad_norm": 0.5644196225080346,
      "learning_rate": 3.9973911141784605e-05,
      "loss": 0.409,
      "step": 141
    },
    {
      "epoch": 0.5751898734177215,
      "grad_norm": 0.6544914212755923,
      "learning_rate": 3.997093258045973e-05,
      "loss": 0.4121,
      "step": 142
    },
    {
      "epoch": 0.579240506329114,
      "grad_norm": 0.6621138063748118,
      "learning_rate": 3.996779317598936e-05,
      "loss": 0.4002,
      "step": 143
    },
    {
      "epoch": 0.5832911392405064,
      "grad_norm": 0.7368973458752719,
      "learning_rate": 3.996449295365782e-05,
      "loss": 0.4231,
      "step": 144
    },
    {
      "epoch": 0.5873417721518988,
      "grad_norm": 0.6818765832885281,
      "learning_rate": 3.996103194004467e-05,
      "loss": 0.4153,
      "step": 145
    },
    {
      "epoch": 0.5913924050632912,
      "grad_norm": 0.5877368307693503,
      "learning_rate": 3.995741016302441e-05,
      "loss": 0.4134,
      "step": 146
    },
    {
      "epoch": 0.5954430379746836,
      "grad_norm": 0.5234054732866966,
      "learning_rate": 3.9953627651766364e-05,
      "loss": 0.4113,
      "step": 147
    },
    {
      "epoch": 0.599493670886076,
      "grad_norm": 0.6430987524992893,
      "learning_rate": 3.9949684436734325e-05,
      "loss": 0.4048,
      "step": 148
    },
    {
      "epoch": 0.6035443037974684,
      "grad_norm": 0.5995164209180909,
      "learning_rate": 3.994558054968643e-05,
      "loss": 0.4135,
      "step": 149
    },
    {
      "epoch": 0.6075949367088608,
      "grad_norm": 0.592760827920417,
      "learning_rate": 3.994131602367481e-05,
      "loss": 0.41,
      "step": 150
    },
    {
      "epoch": 0.6116455696202532,
      "grad_norm": 0.6281108634647988,
      "learning_rate": 3.9936890893045376e-05,
      "loss": 0.3898,
      "step": 151
    },
    {
      "epoch": 0.6156962025316456,
      "grad_norm": 0.635818299719854,
      "learning_rate": 3.993230519343752e-05,
      "loss": 0.4058,
      "step": 152
    },
    {
      "epoch": 0.619746835443038,
      "grad_norm": 0.7141375025680428,
      "learning_rate": 3.992755896178383e-05,
      "loss": 0.4017,
      "step": 153
    },
    {
      "epoch": 0.6237974683544304,
      "grad_norm": 0.6583271631309887,
      "learning_rate": 3.992265223630981e-05,
      "loss": 0.4017,
      "step": 154
    },
    {
      "epoch": 0.6278481012658228,
      "grad_norm": 0.8499087214829654,
      "learning_rate": 3.991758505653355e-05,
      "loss": 0.4456,
      "step": 155
    },
    {
      "epoch": 0.6318987341772152,
      "grad_norm": 0.6878218223037548,
      "learning_rate": 3.991235746326543e-05,
      "loss": 0.3861,
      "step": 156
    },
    {
      "epoch": 0.6359493670886076,
      "grad_norm": 0.7363223736673241,
      "learning_rate": 3.9906969498607745e-05,
      "loss": 0.402,
      "step": 157
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.6228552998551349,
      "learning_rate": 3.990142120595444e-05,
      "loss": 0.4036,
      "step": 158
    },
    {
      "epoch": 0.6440506329113924,
      "grad_norm": 0.6338456431608611,
      "learning_rate": 3.98957126299907e-05,
      "loss": 0.4046,
      "step": 159
    },
    {
      "epoch": 0.6481012658227848,
      "grad_norm": 0.6389150882212308,
      "learning_rate": 3.9889843816692596e-05,
      "loss": 0.4172,
      "step": 160
    },
    {
      "epoch": 0.6521518987341772,
      "grad_norm": 0.6036085899610176,
      "learning_rate": 3.9883814813326766e-05,
      "loss": 0.392,
      "step": 161
    },
    {
      "epoch": 0.6562025316455696,
      "grad_norm": 0.6487134613296739,
      "learning_rate": 3.9877625668449956e-05,
      "loss": 0.3807,
      "step": 162
    },
    {
      "epoch": 0.660253164556962,
      "grad_norm": 0.7137368002181714,
      "learning_rate": 3.98712764319087e-05,
      "loss": 0.4314,
      "step": 163
    },
    {
      "epoch": 0.6643037974683544,
      "grad_norm": 0.7174312866511422,
      "learning_rate": 3.9864767154838864e-05,
      "loss": 0.3817,
      "step": 164
    },
    {
      "epoch": 0.6683544303797468,
      "grad_norm": 0.625778058164935,
      "learning_rate": 3.9858097889665277e-05,
      "loss": 0.3888,
      "step": 165
    },
    {
      "epoch": 0.6724050632911392,
      "grad_norm": 0.6349635015299684,
      "learning_rate": 3.985126869010129e-05,
      "loss": 0.4162,
      "step": 166
    },
    {
      "epoch": 0.6764556962025317,
      "grad_norm": 0.652439920518161,
      "learning_rate": 3.984427961114833e-05,
      "loss": 0.3933,
      "step": 167
    },
    {
      "epoch": 0.6805063291139241,
      "grad_norm": 0.5576323305075505,
      "learning_rate": 3.9837130709095475e-05,
      "loss": 0.3993,
      "step": 168
    },
    {
      "epoch": 0.6845569620253165,
      "grad_norm": 0.6421873461967911,
      "learning_rate": 3.982982204151901e-05,
      "loss": 0.4059,
      "step": 169
    },
    {
      "epoch": 0.6886075949367089,
      "grad_norm": 0.6076821066194985,
      "learning_rate": 3.982235366728193e-05,
      "loss": 0.4067,
      "step": 170
    },
    {
      "epoch": 0.6926582278481013,
      "grad_norm": 0.5939078420510215,
      "learning_rate": 3.9814725646533505e-05,
      "loss": 0.4004,
      "step": 171
    },
    {
      "epoch": 0.6967088607594937,
      "grad_norm": 0.5867878805038147,
      "learning_rate": 3.9806938040708746e-05,
      "loss": 0.4008,
      "step": 172
    },
    {
      "epoch": 0.7007594936708861,
      "grad_norm": 0.5757788709347573,
      "learning_rate": 3.9798990912527976e-05,
      "loss": 0.3816,
      "step": 173
    },
    {
      "epoch": 0.7048101265822785,
      "grad_norm": 0.5532889625182552,
      "learning_rate": 3.979088432599627e-05,
      "loss": 0.3974,
      "step": 174
    },
    {
      "epoch": 0.7088607594936709,
      "grad_norm": 0.6463140180718661,
      "learning_rate": 3.9782618346402964e-05,
      "loss": 0.4008,
      "step": 175
    },
    {
      "epoch": 0.7129113924050633,
      "grad_norm": 0.6001008150461599,
      "learning_rate": 3.977419304032111e-05,
      "loss": 0.4148,
      "step": 176
    },
    {
      "epoch": 0.7169620253164557,
      "grad_norm": 0.571983419786105,
      "learning_rate": 3.976560847560697e-05,
      "loss": 0.3972,
      "step": 177
    },
    {
      "epoch": 0.7210126582278481,
      "grad_norm": 0.5794236695408197,
      "learning_rate": 3.9756864721399456e-05,
      "loss": 0.3943,
      "step": 178
    },
    {
      "epoch": 0.7250632911392405,
      "grad_norm": 0.5652736051090463,
      "learning_rate": 3.974796184811956e-05,
      "loss": 0.387,
      "step": 179
    },
    {
      "epoch": 0.7291139240506329,
      "grad_norm": 0.5390132913353076,
      "learning_rate": 3.973889992746979e-05,
      "loss": 0.3985,
      "step": 180
    },
    {
      "epoch": 0.7331645569620253,
      "grad_norm": 0.6579737794026344,
      "learning_rate": 3.972967903243361e-05,
      "loss": 0.3957,
      "step": 181
    },
    {
      "epoch": 0.7372151898734177,
      "grad_norm": 0.5535396261992963,
      "learning_rate": 3.972029923727486e-05,
      "loss": 0.3915,
      "step": 182
    },
    {
      "epoch": 0.7412658227848101,
      "grad_norm": 0.6298313116854628,
      "learning_rate": 3.971076061753709e-05,
      "loss": 0.3774,
      "step": 183
    },
    {
      "epoch": 0.7453164556962025,
      "grad_norm": 0.7285762262941986,
      "learning_rate": 3.9701063250043066e-05,
      "loss": 0.3948,
      "step": 184
    },
    {
      "epoch": 0.7493670886075949,
      "grad_norm": 0.5703312427240333,
      "learning_rate": 3.969120721289402e-05,
      "loss": 0.399,
      "step": 185
    },
    {
      "epoch": 0.7534177215189873,
      "grad_norm": 0.7237833313644476,
      "learning_rate": 3.9681192585469146e-05,
      "loss": 0.3839,
      "step": 186
    },
    {
      "epoch": 0.7574683544303797,
      "grad_norm": 0.4954374154466907,
      "learning_rate": 3.9671019448424865e-05,
      "loss": 0.3969,
      "step": 187
    },
    {
      "epoch": 0.7615189873417721,
      "grad_norm": 0.6898169795174586,
      "learning_rate": 3.966068788369422e-05,
      "loss": 0.3968,
      "step": 188
    },
    {
      "epoch": 0.7655696202531646,
      "grad_norm": 0.5267028206688844,
      "learning_rate": 3.965019797448622e-05,
      "loss": 0.3985,
      "step": 189
    },
    {
      "epoch": 0.769620253164557,
      "grad_norm": 0.6476845232289967,
      "learning_rate": 3.963954980528515e-05,
      "loss": 0.3835,
      "step": 190
    },
    {
      "epoch": 0.7736708860759494,
      "grad_norm": 0.5789930766181848,
      "learning_rate": 3.9628743461849905e-05,
      "loss": 0.3771,
      "step": 191
    },
    {
      "epoch": 0.7777215189873418,
      "grad_norm": 0.6138996960276488,
      "learning_rate": 3.961777903121329e-05,
      "loss": 0.4082,
      "step": 192
    },
    {
      "epoch": 0.7817721518987342,
      "grad_norm": 0.5904923923390615,
      "learning_rate": 3.960665660168131e-05,
      "loss": 0.412,
      "step": 193
    },
    {
      "epoch": 0.7858227848101266,
      "grad_norm": 0.5968133281987092,
      "learning_rate": 3.9595376262832485e-05,
      "loss": 0.3794,
      "step": 194
    },
    {
      "epoch": 0.789873417721519,
      "grad_norm": 0.6187939397515435,
      "learning_rate": 3.9583938105517127e-05,
      "loss": 0.3794,
      "step": 195
    },
    {
      "epoch": 0.7939240506329114,
      "grad_norm": 0.5971090829024325,
      "learning_rate": 3.957234222185657e-05,
      "loss": 0.4039,
      "step": 196
    },
    {
      "epoch": 0.7979746835443038,
      "grad_norm": 0.5844199254836303,
      "learning_rate": 3.9560588705242474e-05,
      "loss": 0.3968,
      "step": 197
    },
    {
      "epoch": 0.8020253164556962,
      "grad_norm": 0.5472233358236805,
      "learning_rate": 3.954867765033605e-05,
      "loss": 0.3794,
      "step": 198
    },
    {
      "epoch": 0.8060759493670886,
      "grad_norm": 0.6622303194616858,
      "learning_rate": 3.953660915306728e-05,
      "loss": 0.4008,
      "step": 199
    },
    {
      "epoch": 0.810126582278481,
      "grad_norm": 0.5001808326526325,
      "learning_rate": 3.952438331063419e-05,
      "loss": 0.3775,
      "step": 200
    },
    {
      "epoch": 0.8141772151898734,
      "grad_norm": 0.5369976776025599,
      "learning_rate": 3.951200022150205e-05,
      "loss": 0.3787,
      "step": 201
    },
    {
      "epoch": 0.8182278481012658,
      "grad_norm": 0.5433981435629911,
      "learning_rate": 3.949945998540253e-05,
      "loss": 0.3834,
      "step": 202
    },
    {
      "epoch": 0.8222784810126582,
      "grad_norm": 0.5510766980698514,
      "learning_rate": 3.9486762703332993e-05,
      "loss": 0.3765,
      "step": 203
    },
    {
      "epoch": 0.8263291139240506,
      "grad_norm": 0.620797438990786,
      "learning_rate": 3.947390847755559e-05,
      "loss": 0.39,
      "step": 204
    },
    {
      "epoch": 0.830379746835443,
      "grad_norm": 0.4867280277834648,
      "learning_rate": 3.946089741159648e-05,
      "loss": 0.3708,
      "step": 205
    },
    {
      "epoch": 0.8344303797468354,
      "grad_norm": 0.6155041820392513,
      "learning_rate": 3.944772961024501e-05,
      "loss": 0.3948,
      "step": 206
    },
    {
      "epoch": 0.8384810126582278,
      "grad_norm": 0.5597973556750337,
      "learning_rate": 3.943440517955285e-05,
      "loss": 0.3739,
      "step": 207
    },
    {
      "epoch": 0.8425316455696202,
      "grad_norm": 0.5892277346595501,
      "learning_rate": 3.9420924226833126e-05,
      "loss": 0.3993,
      "step": 208
    },
    {
      "epoch": 0.8465822784810126,
      "grad_norm": 0.44379375485214617,
      "learning_rate": 3.9407286860659566e-05,
      "loss": 0.3703,
      "step": 209
    },
    {
      "epoch": 0.850632911392405,
      "grad_norm": 0.5626310462545386,
      "learning_rate": 3.9393493190865657e-05,
      "loss": 0.3771,
      "step": 210
    },
    {
      "epoch": 0.8546835443037974,
      "grad_norm": 0.512024111083107,
      "learning_rate": 3.937954332854371e-05,
      "loss": 0.3939,
      "step": 211
    },
    {
      "epoch": 0.8587341772151899,
      "grad_norm": 0.5175003555918976,
      "learning_rate": 3.9365437386044016e-05,
      "loss": 0.3843,
      "step": 212
    },
    {
      "epoch": 0.8627848101265823,
      "grad_norm": 0.5246318923036594,
      "learning_rate": 3.935117547697387e-05,
      "loss": 0.393,
      "step": 213
    },
    {
      "epoch": 0.8668354430379747,
      "grad_norm": 0.6216385081591411,
      "learning_rate": 3.933675771619675e-05,
      "loss": 0.3913,
      "step": 214
    },
    {
      "epoch": 0.8708860759493671,
      "grad_norm": 0.5529226926998263,
      "learning_rate": 3.932218421983131e-05,
      "loss": 0.4139,
      "step": 215
    },
    {
      "epoch": 0.8749367088607595,
      "grad_norm": 0.5397959084428513,
      "learning_rate": 3.9307455105250484e-05,
      "loss": 0.3841,
      "step": 216
    },
    {
      "epoch": 0.8789873417721519,
      "grad_norm": 0.6221479740540723,
      "learning_rate": 3.929257049108054e-05,
      "loss": 0.3959,
      "step": 217
    },
    {
      "epoch": 0.8830379746835443,
      "grad_norm": 0.6006835380209825,
      "learning_rate": 3.927753049720011e-05,
      "loss": 0.3772,
      "step": 218
    },
    {
      "epoch": 0.8870886075949367,
      "grad_norm": 0.600451054400137,
      "learning_rate": 3.9262335244739234e-05,
      "loss": 0.3972,
      "step": 219
    },
    {
      "epoch": 0.8911392405063291,
      "grad_norm": 0.5781664812386912,
      "learning_rate": 3.92469848560784e-05,
      "loss": 0.3879,
      "step": 220
    },
    {
      "epoch": 0.8951898734177215,
      "grad_norm": 0.6044966388830026,
      "learning_rate": 3.923147945484751e-05,
      "loss": 0.3814,
      "step": 221
    },
    {
      "epoch": 0.8992405063291139,
      "grad_norm": 0.48351242218622664,
      "learning_rate": 3.9215819165924956e-05,
      "loss": 0.3776,
      "step": 222
    },
    {
      "epoch": 0.9032911392405063,
      "grad_norm": 0.4774873161628841,
      "learning_rate": 3.920000411543654e-05,
      "loss": 0.3811,
      "step": 223
    },
    {
      "epoch": 0.9073417721518987,
      "grad_norm": 0.544164808884084,
      "learning_rate": 3.9184034430754495e-05,
      "loss": 0.387,
      "step": 224
    },
    {
      "epoch": 0.9113924050632911,
      "grad_norm": 0.48800668588648755,
      "learning_rate": 3.916791024049648e-05,
      "loss": 0.3884,
      "step": 225
    },
    {
      "epoch": 0.9154430379746835,
      "grad_norm": 0.47819100281546206,
      "learning_rate": 3.91516316745245e-05,
      "loss": 0.3986,
      "step": 226
    },
    {
      "epoch": 0.9194936708860759,
      "grad_norm": 0.6276925588637617,
      "learning_rate": 3.913519886394389e-05,
      "loss": 0.398,
      "step": 227
    },
    {
      "epoch": 0.9235443037974683,
      "grad_norm": 0.4855121013534481,
      "learning_rate": 3.911861194110225e-05,
      "loss": 0.3749,
      "step": 228
    },
    {
      "epoch": 0.9275949367088607,
      "grad_norm": 0.5506060489671165,
      "learning_rate": 3.910187103958837e-05,
      "loss": 0.3789,
      "step": 229
    },
    {
      "epoch": 0.9316455696202531,
      "grad_norm": 0.5032567217512716,
      "learning_rate": 3.908497629423117e-05,
      "loss": 0.3834,
      "step": 230
    },
    {
      "epoch": 0.9356962025316455,
      "grad_norm": 0.5074510159899155,
      "learning_rate": 3.9067927841098614e-05,
      "loss": 0.3782,
      "step": 231
    },
    {
      "epoch": 0.9397468354430379,
      "grad_norm": 0.5810482251424521,
      "learning_rate": 3.9050725817496594e-05,
      "loss": 0.3994,
      "step": 232
    },
    {
      "epoch": 0.9437974683544303,
      "grad_norm": 0.48154218893890627,
      "learning_rate": 3.9033370361967844e-05,
      "loss": 0.3879,
      "step": 233
    },
    {
      "epoch": 0.9478481012658228,
      "grad_norm": 0.5036518583142346,
      "learning_rate": 3.901586161429081e-05,
      "loss": 0.3833,
      "step": 234
    },
    {
      "epoch": 0.9518987341772152,
      "grad_norm": 0.515991760586983,
      "learning_rate": 3.8998199715478545e-05,
      "loss": 0.399,
      "step": 235
    },
    {
      "epoch": 0.9559493670886076,
      "grad_norm": 0.5564364229482045,
      "learning_rate": 3.8980384807777564e-05,
      "loss": 0.3947,
      "step": 236
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.49125752307482756,
      "learning_rate": 3.896241703466667e-05,
      "loss": 0.3773,
      "step": 237
    },
    {
      "epoch": 0.9640506329113924,
      "grad_norm": 0.5490586452032531,
      "learning_rate": 3.894429654085585e-05,
      "loss": 0.3795,
      "step": 238
    },
    {
      "epoch": 0.9681012658227848,
      "grad_norm": 0.5093727901682145,
      "learning_rate": 3.892602347228505e-05,
      "loss": 0.3859,
      "step": 239
    },
    {
      "epoch": 0.9721518987341772,
      "grad_norm": 0.5108191774877997,
      "learning_rate": 3.890759797612307e-05,
      "loss": 0.3938,
      "step": 240
    },
    {
      "epoch": 0.9762025316455696,
      "grad_norm": 0.5480494146076412,
      "learning_rate": 3.888902020076632e-05,
      "loss": 0.3771,
      "step": 241
    },
    {
      "epoch": 0.980253164556962,
      "grad_norm": 0.590101940050008,
      "learning_rate": 3.887029029583764e-05,
      "loss": 0.4018,
      "step": 242
    },
    {
      "epoch": 0.9843037974683544,
      "grad_norm": 0.5889911022596893,
      "learning_rate": 3.8851408412185125e-05,
      "loss": 0.3885,
      "step": 243
    },
    {
      "epoch": 0.9883544303797468,
      "grad_norm": 0.5800811745265421,
      "learning_rate": 3.8832374701880855e-05,
      "loss": 0.3787,
      "step": 244
    },
    {
      "epoch": 0.9924050632911392,
      "grad_norm": 0.506490481412578,
      "learning_rate": 3.881318931821972e-05,
      "loss": 0.3828,
      "step": 245
    },
    {
      "epoch": 0.9964556962025316,
      "grad_norm": 0.5770643229455178,
      "learning_rate": 3.879385241571817e-05,
      "loss": 0.3939,
      "step": 246
    },
    {
      "epoch": 1.0020253164556963,
      "grad_norm": 0.5119462113445291,
      "learning_rate": 3.8774364150112955e-05,
      "loss": 0.3452,
      "step": 247
    },
    {
      "epoch": 1.0060759493670886,
      "grad_norm": 0.5527818723620753,
      "learning_rate": 3.8754724678359884e-05,
      "loss": 0.3097,
      "step": 248
    },
    {
      "epoch": 1.010126582278481,
      "grad_norm": 0.4579276642691292,
      "learning_rate": 3.873493415863256e-05,
      "loss": 0.3177,
      "step": 249
    },
    {
      "epoch": 1.0141772151898734,
      "grad_norm": 0.527530146054158,
      "learning_rate": 3.871499275032111e-05,
      "loss": 0.3021,
      "step": 250
    },
    {
      "epoch": 1.0182278481012659,
      "grad_norm": 0.5705210713556268,
      "learning_rate": 3.869490061403091e-05,
      "loss": 0.3016,
      "step": 251
    },
    {
      "epoch": 1.0222784810126582,
      "grad_norm": 0.49730647908918246,
      "learning_rate": 3.867465791158124e-05,
      "loss": 0.3104,
      "step": 252
    },
    {
      "epoch": 1.0263291139240507,
      "grad_norm": 0.5191567581279882,
      "learning_rate": 3.865426480600407e-05,
      "loss": 0.3022,
      "step": 253
    },
    {
      "epoch": 1.030379746835443,
      "grad_norm": 0.5222183849920577,
      "learning_rate": 3.863372146154264e-05,
      "loss": 0.3202,
      "step": 254
    },
    {
      "epoch": 1.0344303797468355,
      "grad_norm": 0.443944722224778,
      "learning_rate": 3.861302804365024e-05,
      "loss": 0.31,
      "step": 255
    },
    {
      "epoch": 1.0384810126582278,
      "grad_norm": 0.5248368444059038,
      "learning_rate": 3.85921847189888e-05,
      "loss": 0.3012,
      "step": 256
    },
    {
      "epoch": 1.0425316455696203,
      "grad_norm": 0.5055083932770217,
      "learning_rate": 3.85711916554276e-05,
      "loss": 0.2938,
      "step": 257
    },
    {
      "epoch": 1.0465822784810126,
      "grad_norm": 0.4973054204391343,
      "learning_rate": 3.85500490220419e-05,
      "loss": 0.3052,
      "step": 258
    },
    {
      "epoch": 1.0506329113924051,
      "grad_norm": 0.5736412831573663,
      "learning_rate": 3.852875698911154e-05,
      "loss": 0.3248,
      "step": 259
    },
    {
      "epoch": 1.0546835443037974,
      "grad_norm": 0.5282538316595922,
      "learning_rate": 3.850731572811963e-05,
      "loss": 0.3223,
      "step": 260
    },
    {
      "epoch": 1.05873417721519,
      "grad_norm": 0.48180766490230803,
      "learning_rate": 3.848572541175116e-05,
      "loss": 0.3005,
      "step": 261
    },
    {
      "epoch": 1.0627848101265822,
      "grad_norm": 0.6029936632964609,
      "learning_rate": 3.846398621389154e-05,
      "loss": 0.3065,
      "step": 262
    },
    {
      "epoch": 1.0668354430379747,
      "grad_norm": 0.6251798932205733,
      "learning_rate": 3.84420983096253e-05,
      "loss": 0.304,
      "step": 263
    },
    {
      "epoch": 1.070886075949367,
      "grad_norm": 0.5471003515686732,
      "learning_rate": 3.8420061875234606e-05,
      "loss": 0.3129,
      "step": 264
    },
    {
      "epoch": 1.0749367088607595,
      "grad_norm": 0.5700951362651965,
      "learning_rate": 3.839787708819787e-05,
      "loss": 0.3256,
      "step": 265
    },
    {
      "epoch": 1.0789873417721518,
      "grad_norm": 0.540576948495851,
      "learning_rate": 3.8375544127188325e-05,
      "loss": 0.3144,
      "step": 266
    },
    {
      "epoch": 1.0830379746835443,
      "grad_norm": 0.5806600208467062,
      "learning_rate": 3.8353063172072564e-05,
      "loss": 0.3103,
      "step": 267
    },
    {
      "epoch": 1.0870886075949366,
      "grad_norm": 0.5216714256728742,
      "learning_rate": 3.8330434403909105e-05,
      "loss": 0.3002,
      "step": 268
    },
    {
      "epoch": 1.0911392405063292,
      "grad_norm": 0.5197974780451522,
      "learning_rate": 3.8307658004946934e-05,
      "loss": 0.3148,
      "step": 269
    },
    {
      "epoch": 1.0951898734177214,
      "grad_norm": 0.5013447148714038,
      "learning_rate": 3.8284734158624046e-05,
      "loss": 0.2917,
      "step": 270
    },
    {
      "epoch": 1.099240506329114,
      "grad_norm": 0.48168530745886773,
      "learning_rate": 3.826166304956594e-05,
      "loss": 0.2864,
      "step": 271
    },
    {
      "epoch": 1.1032911392405063,
      "grad_norm": 0.47655058494480357,
      "learning_rate": 3.8238444863584164e-05,
      "loss": 0.3163,
      "step": 272
    },
    {
      "epoch": 1.1073417721518988,
      "grad_norm": 0.5339220268147085,
      "learning_rate": 3.821507978767479e-05,
      "loss": 0.2975,
      "step": 273
    },
    {
      "epoch": 1.111392405063291,
      "grad_norm": 0.4626088900765369,
      "learning_rate": 3.819156801001693e-05,
      "loss": 0.2912,
      "step": 274
    },
    {
      "epoch": 1.1154430379746836,
      "grad_norm": 0.47443887067956175,
      "learning_rate": 3.816790971997121e-05,
      "loss": 0.2993,
      "step": 275
    },
    {
      "epoch": 1.1194936708860759,
      "grad_norm": 0.4093689960905338,
      "learning_rate": 3.8144105108078246e-05,
      "loss": 0.2765,
      "step": 276
    },
    {
      "epoch": 1.1235443037974684,
      "grad_norm": 0.5392722626876307,
      "learning_rate": 3.81201543660571e-05,
      "loss": 0.3034,
      "step": 277
    },
    {
      "epoch": 1.1275949367088607,
      "grad_norm": 0.40602409296275105,
      "learning_rate": 3.809605768680377e-05,
      "loss": 0.2997,
      "step": 278
    },
    {
      "epoch": 1.1316455696202532,
      "grad_norm": 0.49593832025547946,
      "learning_rate": 3.807181526438958e-05,
      "loss": 0.302,
      "step": 279
    },
    {
      "epoch": 1.1356962025316455,
      "grad_norm": 0.4532181324474753,
      "learning_rate": 3.8047427294059697e-05,
      "loss": 0.2924,
      "step": 280
    },
    {
      "epoch": 1.139746835443038,
      "grad_norm": 0.48868798702594746,
      "learning_rate": 3.802289397223145e-05,
      "loss": 0.3257,
      "step": 281
    },
    {
      "epoch": 1.1437974683544303,
      "grad_norm": 0.5037948093099135,
      "learning_rate": 3.7998215496492854e-05,
      "loss": 0.2936,
      "step": 282
    },
    {
      "epoch": 1.1478481012658228,
      "grad_norm": 0.46438947317739393,
      "learning_rate": 3.797339206560096e-05,
      "loss": 0.2981,
      "step": 283
    },
    {
      "epoch": 1.1518987341772151,
      "grad_norm": 0.5150653780907053,
      "learning_rate": 3.794842387948027e-05,
      "loss": 0.3205,
      "step": 284
    },
    {
      "epoch": 1.1559493670886076,
      "grad_norm": 0.45957375432046205,
      "learning_rate": 3.7923311139221114e-05,
      "loss": 0.2897,
      "step": 285
    },
    {
      "epoch": 1.16,
      "grad_norm": 0.5385042167668759,
      "learning_rate": 3.7898054047078054e-05,
      "loss": 0.3085,
      "step": 286
    },
    {
      "epoch": 1.1640506329113924,
      "grad_norm": 0.5724405847896237,
      "learning_rate": 3.787265280646825e-05,
      "loss": 0.3048,
      "step": 287
    },
    {
      "epoch": 1.1681012658227847,
      "grad_norm": 0.5138764148024508,
      "learning_rate": 3.7847107621969786e-05,
      "loss": 0.3074,
      "step": 288
    },
    {
      "epoch": 1.1721518987341772,
      "grad_norm": 0.4298504289754485,
      "learning_rate": 3.7821418699320064e-05,
      "loss": 0.2965,
      "step": 289
    },
    {
      "epoch": 1.1762025316455695,
      "grad_norm": 0.48863743774152546,
      "learning_rate": 3.7795586245414145e-05,
      "loss": 0.3083,
      "step": 290
    },
    {
      "epoch": 1.180253164556962,
      "grad_norm": 0.5157212291684674,
      "learning_rate": 3.776961046830306e-05,
      "loss": 0.3082,
      "step": 291
    },
    {
      "epoch": 1.1843037974683543,
      "grad_norm": 0.4247714743123847,
      "learning_rate": 3.774349157719215e-05,
      "loss": 0.2976,
      "step": 292
    },
    {
      "epoch": 1.1883544303797469,
      "grad_norm": 0.5022320658321971,
      "learning_rate": 3.7717229782439365e-05,
      "loss": 0.313,
      "step": 293
    },
    {
      "epoch": 1.1924050632911392,
      "grad_norm": 0.501299357894708,
      "learning_rate": 3.769082529555359e-05,
      "loss": 0.2874,
      "step": 294
    },
    {
      "epoch": 1.1964556962025317,
      "grad_norm": 0.42648183097507847,
      "learning_rate": 3.766427832919294e-05,
      "loss": 0.2924,
      "step": 295
    },
    {
      "epoch": 1.200506329113924,
      "grad_norm": 0.4584777833491372,
      "learning_rate": 3.7637589097163024e-05,
      "loss": 0.3099,
      "step": 296
    },
    {
      "epoch": 1.2045569620253165,
      "grad_norm": 0.5329701313500251,
      "learning_rate": 3.761075781441526e-05,
      "loss": 0.3163,
      "step": 297
    },
    {
      "epoch": 1.2086075949367088,
      "grad_norm": 0.44114986093953573,
      "learning_rate": 3.75837846970451e-05,
      "loss": 0.2815,
      "step": 298
    },
    {
      "epoch": 1.2126582278481013,
      "grad_norm": 0.4576808827895461,
      "learning_rate": 3.755666996229032e-05,
      "loss": 0.2985,
      "step": 299
    },
    {
      "epoch": 1.2167088607594936,
      "grad_norm": 0.46769025207998915,
      "learning_rate": 3.752941382852927e-05,
      "loss": 0.324,
      "step": 300
    },
    {
      "epoch": 1.220759493670886,
      "grad_norm": 0.4614379930609617,
      "learning_rate": 3.7502016515279115e-05,
      "loss": 0.3101,
      "step": 301
    },
    {
      "epoch": 1.2248101265822784,
      "grad_norm": 0.45272080940932685,
      "learning_rate": 3.7474478243194043e-05,
      "loss": 0.3094,
      "step": 302
    },
    {
      "epoch": 1.228860759493671,
      "grad_norm": 0.46910275561436887,
      "learning_rate": 3.744679923406351e-05,
      "loss": 0.2887,
      "step": 303
    },
    {
      "epoch": 1.2329113924050632,
      "grad_norm": 0.44788968658490463,
      "learning_rate": 3.741897971081043e-05,
      "loss": 0.3252,
      "step": 304
    },
    {
      "epoch": 1.2369620253164557,
      "grad_norm": 0.48114035064975796,
      "learning_rate": 3.739101989748946e-05,
      "loss": 0.2967,
      "step": 305
    },
    {
      "epoch": 1.241012658227848,
      "grad_norm": 0.4653236359051067,
      "learning_rate": 3.7362920019285066e-05,
      "loss": 0.3027,
      "step": 306
    },
    {
      "epoch": 1.2450632911392405,
      "grad_norm": 0.46832754955085637,
      "learning_rate": 3.73346803025098e-05,
      "loss": 0.2999,
      "step": 307
    },
    {
      "epoch": 1.2491139240506328,
      "grad_norm": 0.4652776362236201,
      "learning_rate": 3.730630097460247e-05,
      "loss": 0.3054,
      "step": 308
    },
    {
      "epoch": 1.2531645569620253,
      "grad_norm": 0.4603325766079485,
      "learning_rate": 3.727778226412628e-05,
      "loss": 0.2974,
      "step": 309
    },
    {
      "epoch": 1.2572151898734178,
      "grad_norm": 0.5242601165254391,
      "learning_rate": 3.7249124400767006e-05,
      "loss": 0.2928,
      "step": 310
    },
    {
      "epoch": 1.2612658227848101,
      "grad_norm": 0.457449421527991,
      "learning_rate": 3.722032761533114e-05,
      "loss": 0.3013,
      "step": 311
    },
    {
      "epoch": 1.2653164556962024,
      "grad_norm": 0.44047819382645303,
      "learning_rate": 3.719139213974403e-05,
      "loss": 0.3064,
      "step": 312
    },
    {
      "epoch": 1.269367088607595,
      "grad_norm": 0.5743488605158066,
      "learning_rate": 3.7162318207048006e-05,
      "loss": 0.2919,
      "step": 313
    },
    {
      "epoch": 1.2734177215189875,
      "grad_norm": 0.5250146260989887,
      "learning_rate": 3.713310605140055e-05,
      "loss": 0.3275,
      "step": 314
    },
    {
      "epoch": 1.2774683544303798,
      "grad_norm": 0.4776512093775642,
      "learning_rate": 3.710375590807233e-05,
      "loss": 0.2988,
      "step": 315
    },
    {
      "epoch": 1.281518987341772,
      "grad_norm": 0.4766787132595684,
      "learning_rate": 3.7074268013445365e-05,
      "loss": 0.298,
      "step": 316
    },
    {
      "epoch": 1.2855696202531646,
      "grad_norm": 0.5162005800825932,
      "learning_rate": 3.7044642605011114e-05,
      "loss": 0.3009,
      "step": 317
    },
    {
      "epoch": 1.289620253164557,
      "grad_norm": 0.48671360357105886,
      "learning_rate": 3.701487992136854e-05,
      "loss": 0.2963,
      "step": 318
    },
    {
      "epoch": 1.2936708860759494,
      "grad_norm": 0.4841946032999693,
      "learning_rate": 3.69849802022222e-05,
      "loss": 0.3002,
      "step": 319
    },
    {
      "epoch": 1.2977215189873417,
      "grad_norm": 0.4316781650393908,
      "learning_rate": 3.6954943688380334e-05,
      "loss": 0.305,
      "step": 320
    },
    {
      "epoch": 1.3017721518987342,
      "grad_norm": 0.5235036705376247,
      "learning_rate": 3.692477062175289e-05,
      "loss": 0.3181,
      "step": 321
    },
    {
      "epoch": 1.3058227848101267,
      "grad_norm": 0.4466102494246738,
      "learning_rate": 3.689446124534958e-05,
      "loss": 0.2987,
      "step": 322
    },
    {
      "epoch": 1.309873417721519,
      "grad_norm": 0.5202800226337809,
      "learning_rate": 3.686401580327799e-05,
      "loss": 0.3091,
      "step": 323
    },
    {
      "epoch": 1.3139240506329113,
      "grad_norm": 0.47740159593546666,
      "learning_rate": 3.683343454074149e-05,
      "loss": 0.3026,
      "step": 324
    },
    {
      "epoch": 1.3179746835443038,
      "grad_norm": 0.4454982580285219,
      "learning_rate": 3.6802717704037386e-05,
      "loss": 0.2872,
      "step": 325
    },
    {
      "epoch": 1.3220253164556963,
      "grad_norm": 0.46433523883719785,
      "learning_rate": 3.6771865540554855e-05,
      "loss": 0.298,
      "step": 326
    },
    {
      "epoch": 1.3260759493670886,
      "grad_norm": 0.4477776564759945,
      "learning_rate": 3.674087829877297e-05,
      "loss": 0.3029,
      "step": 327
    },
    {
      "epoch": 1.330126582278481,
      "grad_norm": 0.5098147560421078,
      "learning_rate": 3.6709756228258735e-05,
      "loss": 0.2943,
      "step": 328
    },
    {
      "epoch": 1.3341772151898734,
      "grad_norm": 0.4433116635942819,
      "learning_rate": 3.667849957966501e-05,
      "loss": 0.294,
      "step": 329
    },
    {
      "epoch": 1.338227848101266,
      "grad_norm": 0.5379723514540549,
      "learning_rate": 3.6647108604728546e-05,
      "loss": 0.3019,
      "step": 330
    },
    {
      "epoch": 1.3422784810126582,
      "grad_norm": 0.45666548180103406,
      "learning_rate": 3.661558355626795e-05,
      "loss": 0.3246,
      "step": 331
    },
    {
      "epoch": 1.3463291139240505,
      "grad_norm": 0.4581191047690734,
      "learning_rate": 3.658392468818163e-05,
      "loss": 0.3128,
      "step": 332
    },
    {
      "epoch": 1.350379746835443,
      "grad_norm": 0.44350235228443013,
      "learning_rate": 3.655213225544574e-05,
      "loss": 0.3171,
      "step": 333
    },
    {
      "epoch": 1.3544303797468356,
      "grad_norm": 0.48518476380157766,
      "learning_rate": 3.652020651411218e-05,
      "loss": 0.3004,
      "step": 334
    },
    {
      "epoch": 1.3584810126582278,
      "grad_norm": 0.5224992747059537,
      "learning_rate": 3.6488147721306474e-05,
      "loss": 0.3209,
      "step": 335
    },
    {
      "epoch": 1.3625316455696201,
      "grad_norm": 0.540396004921538,
      "learning_rate": 3.645595613522574e-05,
      "loss": 0.3143,
      "step": 336
    },
    {
      "epoch": 1.3665822784810127,
      "grad_norm": 0.5201617392244775,
      "learning_rate": 3.642363201513657e-05,
      "loss": 0.3082,
      "step": 337
    },
    {
      "epoch": 1.3706329113924052,
      "grad_norm": 0.5441240273733703,
      "learning_rate": 3.6391175621373006e-05,
      "loss": 0.2958,
      "step": 338
    },
    {
      "epoch": 1.3746835443037975,
      "grad_norm": 0.5214847654673751,
      "learning_rate": 3.6358587215334355e-05,
      "loss": 0.3083,
      "step": 339
    },
    {
      "epoch": 1.3787341772151898,
      "grad_norm": 0.49053652512583373,
      "learning_rate": 3.632586705948318e-05,
      "loss": 0.3019,
      "step": 340
    },
    {
      "epoch": 1.3827848101265823,
      "grad_norm": 0.5446647596513042,
      "learning_rate": 3.629301541734311e-05,
      "loss": 0.3163,
      "step": 341
    },
    {
      "epoch": 1.3868354430379748,
      "grad_norm": 0.4855883272214701,
      "learning_rate": 3.626003255349676e-05,
      "loss": 0.3063,
      "step": 342
    },
    {
      "epoch": 1.390886075949367,
      "grad_norm": 0.47091587584283323,
      "learning_rate": 3.622691873358357e-05,
      "loss": 0.2985,
      "step": 343
    },
    {
      "epoch": 1.3949367088607594,
      "grad_norm": 0.45726600914468496,
      "learning_rate": 3.61936742242977e-05,
      "loss": 0.3079,
      "step": 344
    },
    {
      "epoch": 1.398987341772152,
      "grad_norm": 0.45871129338125266,
      "learning_rate": 3.6160299293385864e-05,
      "loss": 0.3063,
      "step": 345
    },
    {
      "epoch": 1.4030379746835444,
      "grad_norm": 0.5007993528671896,
      "learning_rate": 3.612679420964516e-05,
      "loss": 0.3147,
      "step": 346
    },
    {
      "epoch": 1.4070886075949367,
      "grad_norm": 0.47209979465846225,
      "learning_rate": 3.609315924292092e-05,
      "loss": 0.3174,
      "step": 347
    },
    {
      "epoch": 1.411139240506329,
      "grad_norm": 0.5617549661656868,
      "learning_rate": 3.6059394664104554e-05,
      "loss": 0.3137,
      "step": 348
    },
    {
      "epoch": 1.4151898734177215,
      "grad_norm": 0.4688395380620716,
      "learning_rate": 3.602550074513133e-05,
      "loss": 0.3162,
      "step": 349
    },
    {
      "epoch": 1.419240506329114,
      "grad_norm": 0.47815450827690326,
      "learning_rate": 3.599147775897822e-05,
      "loss": 0.3165,
      "step": 350
    },
    {
      "epoch": 1.4232911392405063,
      "grad_norm": 0.47145828067705864,
      "learning_rate": 3.595732597966167e-05,
      "loss": 0.3229,
      "step": 351
    },
    {
      "epoch": 1.4273417721518986,
      "grad_norm": 0.4547508955589941,
      "learning_rate": 3.592304568223542e-05,
      "loss": 0.2986,
      "step": 352
    },
    {
      "epoch": 1.4313924050632911,
      "grad_norm": 0.5221816415689219,
      "learning_rate": 3.588863714278826e-05,
      "loss": 0.3132,
      "step": 353
    },
    {
      "epoch": 1.4354430379746836,
      "grad_norm": 0.42073171876440835,
      "learning_rate": 3.585410063844186e-05,
      "loss": 0.3034,
      "step": 354
    },
    {
      "epoch": 1.439493670886076,
      "grad_norm": 0.4453229373800704,
      "learning_rate": 3.581943644734846e-05,
      "loss": 0.2963,
      "step": 355
    },
    {
      "epoch": 1.4435443037974682,
      "grad_norm": 0.504515538504155,
      "learning_rate": 3.578464484868869e-05,
      "loss": 0.301,
      "step": 356
    },
    {
      "epoch": 1.4475949367088607,
      "grad_norm": 0.43627883244781784,
      "learning_rate": 3.5749726122669316e-05,
      "loss": 0.3027,
      "step": 357
    },
    {
      "epoch": 1.4516455696202533,
      "grad_norm": 0.490280149579753,
      "learning_rate": 3.5714680550520943e-05,
      "loss": 0.3168,
      "step": 358
    },
    {
      "epoch": 1.4556962025316456,
      "grad_norm": 0.4271038725688359,
      "learning_rate": 3.5679508414495794e-05,
      "loss": 0.305,
      "step": 359
    },
    {
      "epoch": 1.4597468354430378,
      "grad_norm": 0.4603660280193674,
      "learning_rate": 3.564420999786543e-05,
      "loss": 0.3118,
      "step": 360
    },
    {
      "epoch": 1.4637974683544304,
      "grad_norm": 0.4440070665837638,
      "learning_rate": 3.560878558491842e-05,
      "loss": 0.2954,
      "step": 361
    },
    {
      "epoch": 1.4678481012658229,
      "grad_norm": 0.4408229888280627,
      "learning_rate": 3.5573235460958145e-05,
      "loss": 0.3045,
      "step": 362
    },
    {
      "epoch": 1.4718987341772152,
      "grad_norm": 0.4359195776654579,
      "learning_rate": 3.553755991230039e-05,
      "loss": 0.2919,
      "step": 363
    },
    {
      "epoch": 1.4759493670886075,
      "grad_norm": 0.44831008451283666,
      "learning_rate": 3.5501759226271144e-05,
      "loss": 0.3046,
      "step": 364
    },
    {
      "epoch": 1.48,
      "grad_norm": 0.5059195483699525,
      "learning_rate": 3.546583369120419e-05,
      "loss": 0.3108,
      "step": 365
    },
    {
      "epoch": 1.4840506329113925,
      "grad_norm": 0.480524239556035,
      "learning_rate": 3.5429783596438864e-05,
      "loss": 0.3064,
      "step": 366
    },
    {
      "epoch": 1.4881012658227848,
      "grad_norm": 0.517108024118541,
      "learning_rate": 3.539360923231766e-05,
      "loss": 0.3122,
      "step": 367
    },
    {
      "epoch": 1.492151898734177,
      "grad_norm": 0.4813759295624822,
      "learning_rate": 3.535731089018394e-05,
      "loss": 0.2955,
      "step": 368
    },
    {
      "epoch": 1.4962025316455696,
      "grad_norm": 0.48207438047968065,
      "learning_rate": 3.532088886237956e-05,
      "loss": 0.299,
      "step": 369
    },
    {
      "epoch": 1.5002531645569621,
      "grad_norm": 0.45184543463435306,
      "learning_rate": 3.528434344224253e-05,
|
"loss": 0.2973, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 1.5043037974683544, |
|
"grad_norm": 0.4912563126958098, |
|
"learning_rate": 3.524767492410464e-05, |
|
"loss": 0.3001, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 1.5083544303797467, |
|
"grad_norm": 0.5113080100559954, |
|
"learning_rate": 3.521088360328908e-05, |
|
"loss": 0.3071, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 1.5124050632911392, |
|
"grad_norm": 0.548572520763522, |
|
"learning_rate": 3.517396977610811e-05, |
|
"loss": 0.3286, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 1.5164556962025317, |
|
"grad_norm": 0.49087557755734745, |
|
"learning_rate": 3.5136933739860595e-05, |
|
"loss": 0.2945, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 1.520506329113924, |
|
"grad_norm": 0.5322975048394536, |
|
"learning_rate": 3.509977579282971e-05, |
|
"loss": 0.3145, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 1.5245569620253163, |
|
"grad_norm": 0.4980937475008699, |
|
"learning_rate": 3.5062496234280424e-05, |
|
"loss": 0.3098, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 1.5286075949367088, |
|
"grad_norm": 0.4841058510336391, |
|
"learning_rate": 3.502509536445719e-05, |
|
"loss": 0.3134, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 1.5326582278481014, |
|
"grad_norm": 0.5528801789877645, |
|
"learning_rate": 3.498757348458147e-05, |
|
"loss": 0.3165, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 1.5367088607594936, |
|
"grad_norm": 0.4342132714384229, |
|
"learning_rate": 3.4949930896849324e-05, |
|
"loss": 0.307, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 1.540759493670886, |
|
"grad_norm": 0.507784187097784, |
|
"learning_rate": 3.491216790442899e-05, |
|
"loss": 0.3139, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.5448101265822785, |
|
"grad_norm": 0.48597571412337054, |
|
"learning_rate": 3.487428481145839e-05, |
|
"loss": 0.3188, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 1.548860759493671, |
|
"grad_norm": 0.48236367161956795, |
|
"learning_rate": 3.483628192304278e-05, |
|
"loss": 0.3198, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 1.5529113924050633, |
|
"grad_norm": 0.49469423696822407, |
|
"learning_rate": 3.479815954525219e-05, |
|
"loss": 0.3146, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 1.5569620253164556, |
|
"grad_norm": 0.4116047821486045, |
|
"learning_rate": 3.475991798511899e-05, |
|
"loss": 0.324, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 1.561012658227848, |
|
"grad_norm": 0.4549436017545799, |
|
"learning_rate": 3.4721557550635464e-05, |
|
"loss": 0.2905, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 1.5650632911392406, |
|
"grad_norm": 0.42749453233079765, |
|
"learning_rate": 3.468307855075128e-05, |
|
"loss": 0.3097, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 1.5691139240506329, |
|
"grad_norm": 0.4359857621249113, |
|
"learning_rate": 3.4644481295371005e-05, |
|
"loss": 0.3003, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 1.5731645569620252, |
|
"grad_norm": 0.44160691119800843, |
|
"learning_rate": 3.460576609535163e-05, |
|
"loss": 0.316, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 1.5772151898734177, |
|
"grad_norm": 0.4497581908557862, |
|
"learning_rate": 3.456693326250006e-05, |
|
"loss": 0.3126, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 1.5812658227848102, |
|
"grad_norm": 0.3975487948464202, |
|
"learning_rate": 3.452798310957058e-05, |
|
"loss": 0.3044, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.5853164556962025, |
|
"grad_norm": 0.40744867962963816, |
|
"learning_rate": 3.4488915950262386e-05, |
|
"loss": 0.3078, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 1.5893670886075948, |
|
"grad_norm": 0.45824243500582085, |
|
"learning_rate": 3.4449732099216985e-05, |
|
"loss": 0.3058, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 1.5934177215189873, |
|
"grad_norm": 0.4698896818439876, |
|
"learning_rate": 3.441043187201574e-05, |
|
"loss": 0.3255, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 1.5974683544303798, |
|
"grad_norm": 0.4452801231260795, |
|
"learning_rate": 3.437101558517728e-05, |
|
"loss": 0.3123, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 1.6015189873417721, |
|
"grad_norm": 0.4802636611696594, |
|
"learning_rate": 3.433148355615496e-05, |
|
"loss": 0.3162, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 1.6055696202531644, |
|
"grad_norm": 0.4278125455689906, |
|
"learning_rate": 3.4291836103334294e-05, |
|
"loss": 0.2944, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 1.609620253164557, |
|
"grad_norm": 0.4901042395637965, |
|
"learning_rate": 3.425207354603043e-05, |
|
"loss": 0.3076, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 1.6136708860759494, |
|
"grad_norm": 0.43271530948036646, |
|
"learning_rate": 3.421219620448553e-05, |
|
"loss": 0.292, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 1.6177215189873417, |
|
"grad_norm": 0.45062297168166876, |
|
"learning_rate": 3.417220439986623e-05, |
|
"loss": 0.305, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 1.621772151898734, |
|
"grad_norm": 0.4678118112405171, |
|
"learning_rate": 3.4132098454261024e-05, |
|
"loss": 0.3074, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.6258227848101265, |
|
"grad_norm": 0.4788363761185329, |
|
"learning_rate": 3.4091878690677676e-05, |
|
"loss": 0.3155, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 1.629873417721519, |
|
"grad_norm": 0.5059138518649388, |
|
"learning_rate": 3.405154543304065e-05, |
|
"loss": 0.3158, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 1.6339240506329114, |
|
"grad_norm": 0.4277952153189073, |
|
"learning_rate": 3.401109900618843e-05, |
|
"loss": 0.2975, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 1.6379746835443036, |
|
"grad_norm": 0.5124314010844975, |
|
"learning_rate": 3.3970539735870996e-05, |
|
"loss": 0.316, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 1.6420253164556962, |
|
"grad_norm": 0.45410583538562904, |
|
"learning_rate": 3.392986794874714e-05, |
|
"loss": 0.3198, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.6460759493670887, |
|
"grad_norm": 0.43408737393832886, |
|
"learning_rate": 3.388908397238184e-05, |
|
"loss": 0.3028, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 1.650126582278481, |
|
"grad_norm": 0.4567568107828501, |
|
"learning_rate": 3.384818813524362e-05, |
|
"loss": 0.3106, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 1.6541772151898733, |
|
"grad_norm": 0.46921501020257195, |
|
"learning_rate": 3.380718076670195e-05, |
|
"loss": 0.3263, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 1.6582278481012658, |
|
"grad_norm": 0.4778818448510691, |
|
"learning_rate": 3.376606219702454e-05, |
|
"loss": 0.3001, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 1.6622784810126583, |
|
"grad_norm": 0.4730293501324466, |
|
"learning_rate": 3.372483275737468e-05, |
|
"loss": 0.3052, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.6663291139240506, |
|
"grad_norm": 0.4414730032458689, |
|
"learning_rate": 3.368349277980861e-05, |
|
"loss": 0.3042, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 1.6703797468354429, |
|
"grad_norm": 0.5129876802106906, |
|
"learning_rate": 3.3642042597272844e-05, |
|
"loss": 0.3143, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 1.6744303797468354, |
|
"grad_norm": 0.4584165162791264, |
|
"learning_rate": 3.360048254360144e-05, |
|
"loss": 0.3057, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 1.678481012658228, |
|
"grad_norm": 0.4181495084502889, |
|
"learning_rate": 3.355881295351336e-05, |
|
"loss": 0.2991, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 1.6825316455696202, |
|
"grad_norm": 0.4599049196770464, |
|
"learning_rate": 3.351703416260975e-05, |
|
"loss": 0.3039, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 1.6865822784810125, |
|
"grad_norm": 0.42928277311159857, |
|
"learning_rate": 3.347514650737126e-05, |
|
"loss": 0.3374, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 1.690632911392405, |
|
"grad_norm": 0.5250725973123629, |
|
"learning_rate": 3.3433150325155295e-05, |
|
"loss": 0.313, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 1.6946835443037975, |
|
"grad_norm": 0.453136502975571, |
|
"learning_rate": 3.339104595419334e-05, |
|
"loss": 0.3053, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 1.6987341772151898, |
|
"grad_norm": 0.4488733119110483, |
|
"learning_rate": 3.3348833733588204e-05, |
|
"loss": 0.3151, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 1.7027848101265821, |
|
"grad_norm": 0.3960936018232401, |
|
"learning_rate": 3.3306514003311305e-05, |
|
"loss": 0.3059, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.7068354430379746, |
|
"grad_norm": 0.45293842337030843, |
|
"learning_rate": 3.326408710419996e-05, |
|
"loss": 0.3135, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 1.7108860759493671, |
|
"grad_norm": 0.4105418611792868, |
|
"learning_rate": 3.322155337795454e-05, |
|
"loss": 0.3048, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 1.7149367088607594, |
|
"grad_norm": 0.4678367332993912, |
|
"learning_rate": 3.317891316713587e-05, |
|
"loss": 0.3058, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 1.7189873417721517, |
|
"grad_norm": 0.4112797998410265, |
|
"learning_rate": 3.313616681516231e-05, |
|
"loss": 0.3087, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 1.7230379746835442, |
|
"grad_norm": 0.47160713880315497, |
|
"learning_rate": 3.309331466630713e-05, |
|
"loss": 0.306, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 1.7270886075949368, |
|
"grad_norm": 0.436007377893416, |
|
"learning_rate": 3.305035706569563e-05, |
|
"loss": 0.3053, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 1.7311392405063293, |
|
"grad_norm": 0.46984485798474973, |
|
"learning_rate": 3.3007294359302433e-05, |
|
"loss": 0.3133, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 1.7351898734177216, |
|
"grad_norm": 0.44935345008935357, |
|
"learning_rate": 3.296412689394864e-05, |
|
"loss": 0.33, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 1.7392405063291139, |
|
"grad_norm": 0.4876571631964019, |
|
"learning_rate": 3.292085501729909e-05, |
|
"loss": 0.314, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 1.7432911392405064, |
|
"grad_norm": 0.37120750949146214, |
|
"learning_rate": 3.2877479077859534e-05, |
|
"loss": 0.2961, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.747341772151899, |
|
"grad_norm": 0.5065829734743708, |
|
"learning_rate": 3.283399942497381e-05, |
|
"loss": 0.3031, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 1.7513924050632912, |
|
"grad_norm": 0.41443762637792464, |
|
"learning_rate": 3.279041640882108e-05, |
|
"loss": 0.3071, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 1.7554430379746835, |
|
"grad_norm": 0.44446577364075934, |
|
"learning_rate": 3.2746730380412964e-05, |
|
"loss": 0.2951, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 1.759493670886076, |
|
"grad_norm": 0.445002250493089, |
|
"learning_rate": 3.2702941691590726e-05, |
|
"loss": 0.3045, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 1.7635443037974685, |
|
"grad_norm": 0.4098133397200715, |
|
"learning_rate": 3.265905069502244e-05, |
|
"loss": 0.3129, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 1.7675949367088608, |
|
"grad_norm": 0.41010379371699557, |
|
"learning_rate": 3.261505774420016e-05, |
|
"loss": 0.3096, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 1.771645569620253, |
|
"grad_norm": 0.43872806307536666, |
|
"learning_rate": 3.257096319343707e-05, |
|
"loss": 0.3261, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 1.7756962025316456, |
|
"grad_norm": 0.4504505684850502, |
|
"learning_rate": 3.2526767397864614e-05, |
|
"loss": 0.3141, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 1.7797468354430381, |
|
"grad_norm": 0.43866696000867067, |
|
"learning_rate": 3.248247071342966e-05, |
|
"loss": 0.3006, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 1.7837974683544304, |
|
"grad_norm": 0.4346254567628055, |
|
"learning_rate": 3.243807349689161e-05, |
|
"loss": 0.3026, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.7878481012658227, |
|
"grad_norm": 0.46273754662499755, |
|
"learning_rate": 3.2393576105819544e-05, |
|
"loss": 0.3153, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 1.7918987341772152, |
|
"grad_norm": 0.41829674204370954, |
|
"learning_rate": 3.2348978898589333e-05, |
|
"loss": 0.2962, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 1.7959493670886078, |
|
"grad_norm": 0.43485134101940387, |
|
"learning_rate": 3.230428223438075e-05, |
|
"loss": 0.2955, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 0.43836741902719545, |
|
"learning_rate": 3.225948647317459e-05, |
|
"loss": 0.3011, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 1.8040506329113923, |
|
"grad_norm": 0.4141936993519025, |
|
"learning_rate": 3.2214591975749745e-05, |
|
"loss": 0.3127, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 1.8081012658227849, |
|
"grad_norm": 0.4918584217591665, |
|
"learning_rate": 3.216959910368034e-05, |
|
"loss": 0.3095, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 1.8121518987341774, |
|
"grad_norm": 0.382089916550884, |
|
"learning_rate": 3.212450821933277e-05, |
|
"loss": 0.3101, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 1.8162025316455697, |
|
"grad_norm": 0.5121360281909645, |
|
"learning_rate": 3.207931968586281e-05, |
|
"loss": 0.2928, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 1.820253164556962, |
|
"grad_norm": 0.45935691550157737, |
|
"learning_rate": 3.203403386721272e-05, |
|
"loss": 0.3134, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 1.8243037974683545, |
|
"grad_norm": 0.5379215105177644, |
|
"learning_rate": 3.1988651128108245e-05, |
|
"loss": 0.3087, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.828354430379747, |
|
"grad_norm": 0.4387569518017159, |
|
"learning_rate": 3.194317183405573e-05, |
|
"loss": 0.3138, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 1.8324050632911393, |
|
"grad_norm": 0.47279265072734217, |
|
"learning_rate": 3.189759635133914e-05, |
|
"loss": 0.3218, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 1.8364556962025316, |
|
"grad_norm": 0.41832414356060166, |
|
"learning_rate": 3.185192504701718e-05, |
|
"loss": 0.3077, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 1.840506329113924, |
|
"grad_norm": 0.42463778380920997, |
|
"learning_rate": 3.1806158288920234e-05, |
|
"loss": 0.3178, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 1.8445569620253166, |
|
"grad_norm": 0.44221300363754845, |
|
"learning_rate": 3.1760296445647477e-05, |
|
"loss": 0.3079, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 1.848607594936709, |
|
"grad_norm": 0.39075083985037345, |
|
"learning_rate": 3.1714339886563896e-05, |
|
"loss": 0.3068, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 1.8526582278481012, |
|
"grad_norm": 0.40541478411746285, |
|
"learning_rate": 3.166828898179731e-05, |
|
"loss": 0.3196, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 1.8567088607594937, |
|
"grad_norm": 0.4250438381544162, |
|
"learning_rate": 3.162214410223536e-05, |
|
"loss": 0.3133, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 1.8607594936708862, |
|
"grad_norm": 0.3770389810884817, |
|
"learning_rate": 3.157590561952257e-05, |
|
"loss": 0.3121, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 1.8648101265822785, |
|
"grad_norm": 0.38901605360226016, |
|
"learning_rate": 3.152957390605732e-05, |
|
"loss": 0.3041, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.8688607594936708, |
|
"grad_norm": 0.43423653710222604, |
|
"learning_rate": 3.148314933498886e-05, |
|
"loss": 0.3129, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 1.8729113924050633, |
|
"grad_norm": 0.42022999873292727, |
|
"learning_rate": 3.143663228021431e-05, |
|
"loss": 0.3143, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 1.8769620253164558, |
|
"grad_norm": 0.39432733029561445, |
|
"learning_rate": 3.1390023116375624e-05, |
|
"loss": 0.3039, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 1.8810126582278481, |
|
"grad_norm": 0.4913918324749314, |
|
"learning_rate": 3.134332221885661e-05, |
|
"loss": 0.32, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 1.8850632911392404, |
|
"grad_norm": 0.3881210632510581, |
|
"learning_rate": 3.129652996377987e-05, |
|
"loss": 0.3148, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 1.889113924050633, |
|
"grad_norm": 0.4153569179311578, |
|
"learning_rate": 3.12496467280038e-05, |
|
"loss": 0.2986, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 1.8931645569620255, |
|
"grad_norm": 0.37947386179548565, |
|
"learning_rate": 3.120267288911952e-05, |
|
"loss": 0.3122, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 1.8972151898734178, |
|
"grad_norm": 0.45647946098407655, |
|
"learning_rate": 3.11556088254479e-05, |
|
"loss": 0.3115, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 1.90126582278481, |
|
"grad_norm": 0.3860176997114069, |
|
"learning_rate": 3.11084549160364e-05, |
|
"loss": 0.277, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 1.9053164556962026, |
|
"grad_norm": 0.4298901080371292, |
|
"learning_rate": 3.106121154065615e-05, |
|
"loss": 0.2858, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.909367088607595, |
|
"grad_norm": 0.3888995608594795, |
|
"learning_rate": 3.1013879079798805e-05, |
|
"loss": 0.3098, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 1.9134177215189874, |
|
"grad_norm": 0.4113563084170954, |
|
"learning_rate": 3.096645791467348e-05, |
|
"loss": 0.3116, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 1.9174683544303797, |
|
"grad_norm": 0.4026897721875418, |
|
"learning_rate": 3.091894842720373e-05, |
|
"loss": 0.2999, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 1.9215189873417722, |
|
"grad_norm": 0.3839364362730939, |
|
"learning_rate": 3.0871351000024425e-05, |
|
"loss": 0.3017, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 1.9255696202531647, |
|
"grad_norm": 0.4324870609340559, |
|
"learning_rate": 3.0823666016478716e-05, |
|
"loss": 0.308, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 1.929620253164557, |
|
"grad_norm": 0.39858419266991024, |
|
"learning_rate": 3.0775893860614896e-05, |
|
"loss": 0.2916, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 1.9336708860759493, |
|
"grad_norm": 0.40214426674941295, |
|
"learning_rate": 3.0728034917183336e-05, |
|
"loss": 0.2991, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 1.9377215189873418, |
|
"grad_norm": 0.4060371093481788, |
|
"learning_rate": 3.06800895716334e-05, |
|
"loss": 0.2995, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 1.9417721518987343, |
|
"grad_norm": 0.4265646726562536, |
|
"learning_rate": 3.063205821011029e-05, |
|
"loss": 0.3097, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 1.9458227848101266, |
|
"grad_norm": 0.45940343826290897, |
|
"learning_rate": 3.0583941219452016e-05, |
|
"loss": 0.3129, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.949873417721519, |
|
"grad_norm": 0.43090329922835074, |
|
"learning_rate": 3.053573898718618e-05, |
|
"loss": 0.2919, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 1.9539240506329114, |
|
"grad_norm": 0.430179644011138, |
|
"learning_rate": 3.0487451901526956e-05, |
|
"loss": 0.3196, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 1.957974683544304, |
|
"grad_norm": 0.4868079293136439, |
|
"learning_rate": 3.0439080351371875e-05, |
|
"loss": 0.299, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 1.9620253164556962, |
|
"grad_norm": 0.45665611481143414, |
|
"learning_rate": 3.0390624726298764e-05, |
|
"loss": 0.3008, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 1.9660759493670885, |
|
"grad_norm": 0.43096065435317993, |
|
"learning_rate": 3.034208541656255e-05, |
|
"loss": 0.3144, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 1.970126582278481, |
|
"grad_norm": 0.4800292257781219, |
|
"learning_rate": 3.029346281309218e-05, |
|
"loss": 0.3164, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 1.9741772151898735, |
|
"grad_norm": 0.42052216079035115, |
|
"learning_rate": 3.0244757307487415e-05, |
|
"loss": 0.3116, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 1.9782278481012658, |
|
"grad_norm": 0.42600552785275636, |
|
"learning_rate": 3.019596929201569e-05, |
|
"loss": 0.2861, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 1.9822784810126581, |
|
"grad_norm": 0.4293501441635794, |
|
"learning_rate": 3.0147099159608985e-05, |
|
"loss": 0.3063, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 1.9863291139240506, |
|
"grad_norm": 0.4238773144298707, |
|
"learning_rate": 3.0098147303860616e-05, |
|
"loss": 0.3037, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.9903797468354432, |
|
"grad_norm": 0.44870059213272473, |
|
"learning_rate": 3.0049114119022117e-05, |
|
"loss": 0.3079, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 1.9944303797468355, |
|
"grad_norm": 0.40328925270782884, |
|
"learning_rate": 3.0000000000000004e-05, |
|
"loss": 0.2967, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 1.9984810126582278, |
|
"grad_norm": 0.4678696955949261, |
|
"learning_rate": 2.995080534235264e-05, |
|
"loss": 0.2848, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 2.0040506329113925, |
|
"grad_norm": 0.572544415709398, |
|
"learning_rate": 2.9901530542287044e-05, |
|
"loss": 0.2247, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 2.008101265822785, |
|
"grad_norm": 0.4694749742925413, |
|
"learning_rate": 2.9852175996655676e-05, |
|
"loss": 0.2194, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 2.012151898734177, |
|
"grad_norm": 0.7473571776738978, |
|
"learning_rate": 2.980274210295326e-05, |
|
"loss": 0.2249, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 2.0162025316455696, |
|
"grad_norm": 0.5435551220806855, |
|
"learning_rate": 2.9753229259313578e-05, |
|
"loss": 0.2051, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 2.020253164556962, |
|
"grad_norm": 0.46950663571656837, |
|
"learning_rate": 2.9703637864506274e-05, |
|
"loss": 0.2054, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 2.0243037974683546, |
|
"grad_norm": 0.5284830298038005, |
|
"learning_rate": 2.965396831793362e-05, |
|
"loss": 0.2046, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 2.0283544303797467, |
|
"grad_norm": 0.4666144288416204, |
|
"learning_rate": 2.9604221019627316e-05, |
|
"loss": 0.2038, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 2.0324050632911392, |
|
"grad_norm": 0.45787058663479013, |
|
"learning_rate": 2.955439637024526e-05, |
|
"loss": 0.2241, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 2.0364556962025318, |
|
"grad_norm": 0.4285686691979659, |
|
"learning_rate": 2.9504494771068334e-05, |
|
"loss": 0.2078, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 2.0405063291139243, |
|
"grad_norm": 0.45569703956953755, |
|
"learning_rate": 2.9454516623997156e-05, |
|
"loss": 0.1955, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 2.0445569620253163, |
|
"grad_norm": 0.4161998147867102, |
|
"learning_rate": 2.9404462331548847e-05, |
|
"loss": 0.2099, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 2.048607594936709, |
|
"grad_norm": 0.4482267491558278, |
|
"learning_rate": 2.93543322968538e-05, |
|
"loss": 0.2099, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 2.0526582278481014, |
|
"grad_norm": 0.4472005190155503, |
|
"learning_rate": 2.9304126923652428e-05, |
|
"loss": 0.2082, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 2.056708860759494, |
|
"grad_norm": 0.4369493976696981, |
|
"learning_rate": 2.9253846616291896e-05, |
|
"loss": 0.2183, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 2.060759493670886, |
|
"grad_norm": 0.45149255075042577, |
|
"learning_rate": 2.9203491779722896e-05, |
|
"loss": 0.2075, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 2.0648101265822785, |
|
"grad_norm": 0.3997517936275642, |
|
"learning_rate": 2.9153062819496357e-05, |
|
"loss": 0.2102, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 2.068860759493671, |
|
"grad_norm": 0.4215086095755418, |
|
"learning_rate": 2.9102560141760178e-05, |
|
"loss": 0.2143, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 2.0729113924050635, |
|
"grad_norm": 0.4180787707753618, |
|
"learning_rate": 2.9051984153256004e-05, |
|
"loss": 0.2108, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 2.0769620253164556, |
|
"grad_norm": 0.39466708319954, |
|
"learning_rate": 2.900133526131588e-05, |
|
"loss": 0.1983, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 2.081012658227848, |
|
"grad_norm": 0.4120512871841792, |
|
"learning_rate": 2.8950613873859025e-05, |
|
"loss": 0.1977, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 2.0850632911392406, |
|
"grad_norm": 0.4129512898442422, |
|
"learning_rate": 2.8899820399388515e-05, |
|
"loss": 0.2004, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 2.089113924050633, |
|
"grad_norm": 0.42304630250129993, |
|
"learning_rate": 2.8848955246988012e-05, |
|
"loss": 0.2178, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 2.093164556962025, |
|
"grad_norm": 0.4184668886661094, |
|
"learning_rate": 2.879801882631847e-05, |
|
"loss": 0.213, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 2.0972151898734177, |
|
"grad_norm": 0.37103847027939185, |
|
"learning_rate": 2.8747011547614808e-05, |
|
"loss": 0.1975, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 2.1012658227848102, |
|
"grad_norm": 0.44634527587271555, |
|
"learning_rate": 2.8695933821682635e-05, |
|
"loss": 0.2032, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 2.1053164556962027, |
|
"grad_norm": 0.42216950293440264, |
|
"learning_rate": 2.864478605989494e-05, |
|
"loss": 0.2217, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 2.109367088607595, |
|
"grad_norm": 0.46050600849938855, |
|
"learning_rate": 2.8593568674188765e-05, |
|
"loss": 0.2076, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 2.1134177215189873, |
|
"grad_norm": 0.3966098814902982, |
|
"learning_rate": 2.8542282077061892e-05, |
|
"loss": 0.1853, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 2.11746835443038, |
|
"grad_norm": 0.46790198714419956, |
|
"learning_rate": 2.8490926681569523e-05, |
|
"loss": 0.2084, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 2.1215189873417724, |
|
"grad_norm": 0.4083512652004032, |
|
"learning_rate": 2.8439502901320956e-05, |
|
"loss": 0.1976, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 2.1255696202531644, |
|
"grad_norm": 0.4522048271471654, |
|
"learning_rate": 2.8388011150476237e-05, |
|
"loss": 0.197, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 2.129620253164557, |
|
"grad_norm": 0.4360138339667211, |
|
"learning_rate": 2.8336451843742866e-05, |
|
"loss": 0.1971, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 2.1336708860759495, |
|
"grad_norm": 0.4634161689873586, |
|
"learning_rate": 2.8284825396372387e-05, |
|
"loss": 0.2058, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 2.137721518987342, |
|
"grad_norm": 0.4258229956574022, |
|
"learning_rate": 2.8233132224157132e-05, |
|
"loss": 0.1835, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 2.141772151898734, |
|
"grad_norm": 0.4354499590756884, |
|
"learning_rate": 2.8181372743426805e-05, |
|
"loss": 0.1884, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 2.1458227848101266, |
|
"grad_norm": 0.40551265586182844, |
|
"learning_rate": 2.8129547371045128e-05, |
|
"loss": 0.214, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 2.149873417721519, |
|
"grad_norm": 0.4293813127341251, |
|
"learning_rate": 2.8077656524406534e-05, |
|
"loss": 0.2018, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 2.1539240506329116, |
|
"grad_norm": 0.4270480795606867, |
|
"learning_rate": 2.802570062143278e-05, |
|
"loss": 0.2119, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 2.1579746835443037, |
|
"grad_norm": 0.37121984984220363, |
|
"learning_rate": 2.7973680080569555e-05, |
|
"loss": 0.2034, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 2.162025316455696, |
|
"grad_norm": 0.47125462054815787, |
|
"learning_rate": 2.792159532078314e-05, |
|
"loss": 0.2081, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 2.1660759493670887, |
|
"grad_norm": 0.42004319886774, |
|
"learning_rate": 2.7869446761557033e-05, |
|
"loss": 0.2139, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 2.170126582278481, |
|
"grad_norm": 0.43982847710580525, |
|
"learning_rate": 2.781723482288857e-05, |
|
"loss": 0.2259, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 2.1741772151898733, |
|
"grad_norm": 0.4504671555217441, |
|
"learning_rate": 2.7764959925285517e-05, |
|
"loss": 0.1893, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 2.178227848101266, |
|
"grad_norm": 0.4050139165873286, |
|
"learning_rate": 2.771262248976272e-05, |
|
"loss": 0.2006, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 2.1822784810126583, |
|
"grad_norm": 0.4220792925741639, |
|
"learning_rate": 2.7660222937838677e-05, |
|
"loss": 0.2008, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 2.186329113924051, |
|
"grad_norm": 0.4270897451451, |
|
"learning_rate": 2.7607761691532186e-05, |
|
"loss": 0.2149, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 2.190379746835443, |
|
"grad_norm": 0.3933051622772489, |
|
"learning_rate": 2.7555239173358916e-05, |
|
"loss": 0.198, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 2.1944303797468354, |
|
"grad_norm": 0.4122357596432804, |
|
"learning_rate": 2.7502655806328e-05, |
|
"loss": 0.2202, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 2.198481012658228, |
|
"grad_norm": 0.4280413295164126, |
|
"learning_rate": 2.7450012013938648e-05, |
|
"loss": 0.19, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 2.2025316455696204, |
|
"grad_norm": 0.3638732019749099, |
|
"learning_rate": 2.739730822017673e-05, |
|
"loss": 0.2048, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 2.2065822784810125, |
|
"grad_norm": 0.41551720046809965, |
|
"learning_rate": 2.7344544849511355e-05, |
|
"loss": 0.1994, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 2.210632911392405, |
|
"grad_norm": 0.36709640503726376, |
|
"learning_rate": 2.7291722326891456e-05, |
|
"loss": 0.2041, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 2.2146835443037975, |
|
"grad_norm": 0.3609873081044122, |
|
"learning_rate": 2.723884107774236e-05, |
|
"loss": 0.2136, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 2.21873417721519, |
|
"grad_norm": 0.38883014978037117, |
|
"learning_rate": 2.718590152796239e-05, |
|
"loss": 0.2055, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 2.222784810126582, |
|
"grad_norm": 0.38699724695829957, |
|
"learning_rate": 2.71329041039194e-05, |
|
"loss": 0.2193, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 2.2268354430379746, |
|
"grad_norm": 0.3948586256575016, |
|
"learning_rate": 2.7079849232447357e-05, |
|
"loss": 0.1905, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 2.230886075949367, |
|
"grad_norm": 0.3985305704365128, |
|
"learning_rate": 2.7026737340842895e-05, |
|
"loss": 0.2049, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 2.2349367088607597, |
|
"grad_norm": 0.40134357695021067, |
|
"learning_rate": 2.697356885686189e-05, |
|
"loss": 0.2143, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 2.2389873417721518, |
|
"grad_norm": 0.412584953553908, |
|
"learning_rate": 2.6920344208716014e-05, |
|
"loss": 0.2094, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 2.2430379746835443, |
|
"grad_norm": 0.4070251152448579, |
|
"learning_rate": 2.6867063825069252e-05, |
|
"loss": 0.2011, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 2.247088607594937, |
|
"grad_norm": 0.36901064673768486, |
|
"learning_rate": 2.6813728135034494e-05, |
|
"loss": 0.2138, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 2.2511392405063293, |
|
"grad_norm": 0.3865100891330635, |
|
"learning_rate": 2.6760337568170056e-05, |
|
"loss": 0.2064, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 2.2551898734177214, |
|
"grad_norm": 0.3818324933091878, |
|
"learning_rate": 2.6706892554476226e-05, |
|
"loss": 0.2136, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 2.259240506329114, |
|
"grad_norm": 0.4194792433858756, |
|
"learning_rate": 2.6653393524391795e-05, |
|
"loss": 0.2071, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 2.2632911392405064, |
|
"grad_norm": 0.4224351102108334, |
|
"learning_rate": 2.6599840908790592e-05, |
|
"loss": 0.1956, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 2.267341772151899, |
|
"grad_norm": 0.4061750703331563, |
|
"learning_rate": 2.6546235138978028e-05, |
|
"loss": 0.217, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 2.271392405063291, |
|
"grad_norm": 0.452696170888799, |
|
"learning_rate": 2.6492576646687597e-05, |
|
"loss": 0.2057, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 2.2754430379746835, |
|
"grad_norm": 0.3814310378327831, |
|
"learning_rate": 2.6438865864077425e-05, |
|
"loss": 0.2152, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 2.279493670886076, |
|
"grad_norm": 0.4489896564073129, |
|
"learning_rate": 2.6385103223726766e-05, |
|
"loss": 0.2226, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 2.2835443037974685, |
|
"grad_norm": 0.4478490454160394, |
|
"learning_rate": 2.6331289158632537e-05, |
|
"loss": 0.2107, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 2.2875949367088606, |
|
"grad_norm": 0.41017584733480617, |
|
"learning_rate": 2.6277424102205817e-05, |
|
"loss": 0.2284, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 2.291645569620253, |
|
"grad_norm": 0.4493156703706004, |
|
"learning_rate": 2.6223508488268374e-05, |
|
"loss": 0.1942, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 2.2956962025316456, |
|
"grad_norm": 0.4585479343025521, |
|
"learning_rate": 2.6169542751049148e-05, |
|
"loss": 0.22, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 2.299746835443038, |
|
"grad_norm": 0.43541916329042013, |
|
"learning_rate": 2.6115527325180754e-05, |
|
"loss": 0.2286, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 2.3037974683544302, |
|
"grad_norm": 0.43858031339268033, |
|
"learning_rate": 2.606146264569603e-05, |
|
"loss": 0.2005, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 2.3078481012658227, |
|
"grad_norm": 0.4098047216559912, |
|
"learning_rate": 2.6007349148024447e-05, |
|
"loss": 0.2133, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 2.3118987341772153, |
|
"grad_norm": 0.4016159797295286, |
|
"learning_rate": 2.5953187267988694e-05, |
|
"loss": 0.2073, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 2.3159493670886078, |
|
"grad_norm": 0.3959384745709861, |
|
"learning_rate": 2.5898977441801097e-05, |
|
"loss": 0.1922, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 0.4141760704225496, |
|
"learning_rate": 2.584472010606015e-05, |
|
"loss": 0.2012, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 2.3240506329113924, |
|
"grad_norm": 0.3847531934189166, |
|
"learning_rate": 2.5790415697746976e-05, |
|
"loss": 0.2069, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 2.328101265822785, |
|
"grad_norm": 0.46272584261492855, |
|
"learning_rate": 2.5736064654221808e-05, |
|
"loss": 0.2155, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 2.3321518987341774, |
|
"grad_norm": 0.3605554219790833, |
|
"learning_rate": 2.568166741322048e-05, |
|
"loss": 0.2176, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 2.3362025316455695, |
|
"grad_norm": 0.4381405263361073, |
|
"learning_rate": 2.56272244128509e-05, |
|
"loss": 0.2138, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 2.340253164556962, |
|
"grad_norm": 0.36761640829652237, |
|
"learning_rate": 2.55727360915895e-05, |
|
"loss": 0.2092, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 2.3443037974683545, |
|
"grad_norm": 0.35713557127164997, |
|
"learning_rate": 2.5518202888277734e-05, |
|
"loss": 0.1936, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 2.348354430379747, |
|
"grad_norm": 0.4084457344136539, |
|
"learning_rate": 2.5463625242118523e-05, |
|
"loss": 0.2146, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 2.352405063291139, |
|
"grad_norm": 0.37065074465173337, |
|
"learning_rate": 2.5409003592672723e-05, |
|
"loss": 0.2061, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 2.3564556962025316, |
|
"grad_norm": 0.3982321117932458, |
|
"learning_rate": 2.535433837985559e-05, |
|
"loss": 0.2095, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 2.360506329113924, |
|
"grad_norm": 0.39762838160838165, |
|
"learning_rate": 2.529963004393324e-05, |
|
"loss": 0.2051, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 2.3645569620253166, |
|
"grad_norm": 0.41219457649320823, |
|
"learning_rate": 2.524487902551908e-05, |
|
"loss": 0.2172, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 2.3686075949367087, |
|
"grad_norm": 0.38545945411394006, |
|
"learning_rate": 2.519008576557029e-05, |
|
"loss": 0.1983, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 2.372658227848101, |
|
"grad_norm": 0.4108449834249282, |
|
"learning_rate": 2.5135250705384254e-05, |
|
"loss": 0.2092, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 2.3767088607594937, |
|
"grad_norm": 0.3921270033478677, |
|
"learning_rate": 2.5080374286595007e-05, |
|
"loss": 0.2184, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 2.3807594936708862, |
|
"grad_norm": 0.3797986518171786, |
|
"learning_rate": 2.5025456951169677e-05, |
|
"loss": 0.1997, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 2.3848101265822783, |
|
"grad_norm": 0.43126599575640356, |
|
"learning_rate": 2.4970499141404942e-05, |
|
"loss": 0.2129, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 2.388860759493671, |
|
"grad_norm": 0.3992721126153027, |
|
"learning_rate": 2.491550129992345e-05, |
|
"loss": 0.211, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 2.3929113924050633, |
|
"grad_norm": 0.39082846019704953, |
|
"learning_rate": 2.486046386967024e-05, |
|
"loss": 0.2134, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 2.396962025316456, |
|
"grad_norm": 0.40861616381767013, |
|
"learning_rate": 2.4805387293909214e-05, |
|
"loss": 0.1906, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 2.401012658227848, |
|
"grad_norm": 0.37444261775277665, |
|
"learning_rate": 2.4750272016219552e-05, |
|
"loss": 0.2294, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 2.4050632911392404, |
|
"grad_norm": 0.4024871630435139, |
|
"learning_rate": 2.4695118480492114e-05, |
|
"loss": 0.2161, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 2.409113924050633, |
|
"grad_norm": 0.39077342244853436, |
|
"learning_rate": 2.4639927130925898e-05, |
|
"loss": 0.2109, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 2.4131645569620255, |
|
"grad_norm": 0.37015314007578415, |
|
"learning_rate": 2.458469841202444e-05, |
|
"loss": 0.2018, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 2.4172151898734175, |
|
"grad_norm": 0.3810921702122006, |
|
"learning_rate": 2.452943276859226e-05, |
|
"loss": 0.2011, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 2.42126582278481, |
|
"grad_norm": 0.3833152383121641, |
|
"learning_rate": 2.447413064573125e-05, |
|
"loss": 0.2015, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 2.4253164556962026, |
|
"grad_norm": 0.40754486089805997, |
|
"learning_rate": 2.4418792488837095e-05, |
|
"loss": 0.1958, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 2.429367088607595, |
|
"grad_norm": 0.3762958071666607, |
|
"learning_rate": 2.4363418743595713e-05, |
|
"loss": 0.207, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 2.433417721518987, |
|
"grad_norm": 0.37422393308332674, |
|
"learning_rate": 2.430800985597963e-05, |
|
"loss": 0.1889, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 2.4374683544303797, |
|
"grad_norm": 0.3777729711792525, |
|
"learning_rate": 2.4252566272244415e-05, |
|
"loss": 0.2035, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 2.441518987341772, |
|
"grad_norm": 0.41914473907852856, |
|
"learning_rate": 2.4197088438925063e-05, |
|
"loss": 0.2094, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 2.4455696202531647, |
|
"grad_norm": 0.3828218692943679, |
|
"learning_rate": 2.4141576802832417e-05, |
|
"loss": 0.2005, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 2.449620253164557, |
|
"grad_norm": 0.40477146806604525, |
|
"learning_rate": 2.408603181104957e-05, |
|
"loss": 0.2083, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 2.4536708860759493, |
|
"grad_norm": 0.4204808050130909, |
|
"learning_rate": 2.4030453910928245e-05, |
|
"loss": 0.2084, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 2.457721518987342, |
|
"grad_norm": 0.3708718211177949, |
|
"learning_rate": 2.397484355008521e-05, |
|
"loss": 0.2103, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 2.4617721518987343, |
|
"grad_norm": 0.37670576427693114, |
|
"learning_rate": 2.3919201176398662e-05, |
|
"loss": 0.2142, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 2.4658227848101264, |
|
"grad_norm": 0.4064354286096273, |
|
"learning_rate": 2.3863527238004633e-05, |
|
"loss": 0.2074, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 2.469873417721519, |
|
"grad_norm": 0.4244070769321671, |
|
"learning_rate": 2.380782218329337e-05, |
|
"loss": 0.2228, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 2.4739240506329114, |
|
"grad_norm": 0.4075677908157438, |
|
"learning_rate": 2.3752086460905725e-05, |
|
"loss": 0.2055, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 2.477974683544304, |
|
"grad_norm": 0.38548592295454265, |
|
"learning_rate": 2.3696320519729544e-05, |
|
"loss": 0.2069, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 2.482025316455696, |
|
"grad_norm": 0.4096245573415293, |
|
"learning_rate": 2.3640524808896045e-05, |
|
"loss": 0.2089, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 2.4860759493670885, |
|
"grad_norm": 0.4266988478557631, |
|
"learning_rate": 2.3584699777776222e-05, |
|
"loss": 0.2112, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 2.490126582278481, |
|
"grad_norm": 0.4123779249659944, |
|
"learning_rate": 2.3528845875977195e-05, |
|
"loss": 0.1989, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 2.4941772151898736, |
|
"grad_norm": 0.38760932907427637, |
|
"learning_rate": 2.3472963553338614e-05, |
|
"loss": 0.2049, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 2.4982278481012656, |
|
"grad_norm": 0.4368948490514543, |
|
"learning_rate": 2.341705325992901e-05, |
|
"loss": 0.2124, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 2.502278481012658, |
|
"grad_norm": 0.3845790481275663, |
|
"learning_rate": 2.336111544604222e-05, |
|
"loss": 0.1974, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 2.5063291139240507, |
|
"grad_norm": 0.4176179291152113, |
|
"learning_rate": 2.33051505621937e-05, |
|
"loss": 0.2028, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 2.510379746835443, |
|
"grad_norm": 0.3851667119640344, |
|
"learning_rate": 2.324915905911693e-05, |
|
"loss": 0.2007, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 2.5144303797468357, |
|
"grad_norm": 0.3860028970790123, |
|
"learning_rate": 2.319314138775977e-05, |
|
"loss": 0.219, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 2.5184810126582278, |
|
"grad_norm": 0.3932724992015882, |
|
"learning_rate": 2.3137097999280856e-05, |
|
"loss": 0.2146, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 2.5225316455696203, |
|
"grad_norm": 0.3806388604241651, |
|
"learning_rate": 2.308102934504593e-05, |
|
"loss": 0.206, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 2.526582278481013, |
|
"grad_norm": 0.43793598744546786, |
|
"learning_rate": 2.3024935876624222e-05, |
|
"loss": 0.2127, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 2.530632911392405, |
|
"grad_norm": 0.3784957616956403, |
|
"learning_rate": 2.2968818045784813e-05, |
|
"loss": 0.2025, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 2.5346835443037974, |
|
"grad_norm": 0.39957234557196647, |
|
"learning_rate": 2.2912676304493006e-05, |
|
"loss": 0.2204, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 2.53873417721519, |
|
"grad_norm": 0.43026433283067245, |
|
"learning_rate": 2.2856511104906668e-05, |
|
"loss": 0.1958, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 2.5427848101265824, |
|
"grad_norm": 0.4134436803884183, |
|
"learning_rate": 2.2800322899372586e-05, |
|
"loss": 0.2086, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 2.546835443037975, |
|
"grad_norm": 0.39377802231665465, |
|
"learning_rate": 2.2744112140422844e-05, |
|
"loss": 0.2001, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 2.550886075949367, |
|
"grad_norm": 0.4101030305554862, |
|
"learning_rate": 2.2687879280771177e-05, |
|
"loss": 0.2226, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 2.5549367088607595, |
|
"grad_norm": 0.4285947480911037, |
|
"learning_rate": 2.26316247733093e-05, |
|
"loss": 0.2114, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 2.558987341772152, |
|
"grad_norm": 0.3829728687800281, |
|
"learning_rate": 2.257534907110328e-05, |
|
"loss": 0.2138, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 2.563037974683544, |
|
"grad_norm": 0.42024446871737253, |
|
"learning_rate": 2.2519052627389882e-05, |
|
"loss": 0.199, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 2.5670886075949366, |
|
"grad_norm": 0.4526057333360632, |
|
"learning_rate": 2.246273589557294e-05, |
|
"loss": 0.2018, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 2.571139240506329, |
|
"grad_norm": 0.4941211014326949, |
|
"learning_rate": 2.240639932921966e-05, |
|
"loss": 0.2037, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 2.5751898734177217, |
|
"grad_norm": 0.46485483211118683, |
|
"learning_rate": 2.2350043382056995e-05, |
|
"loss": 0.2008, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 2.579240506329114, |
|
"grad_norm": 0.4331860731355648, |
|
"learning_rate": 2.2293668507968015e-05, |
|
"loss": 0.2065, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 2.5832911392405062, |
|
"grad_norm": 0.4819822540384287, |
|
"learning_rate": 2.2237275160988186e-05, |
|
"loss": 0.2108, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 2.5873417721518988, |
|
"grad_norm": 0.4537947792326141, |
|
"learning_rate": 2.2180863795301787e-05, |
|
"loss": 0.2121, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 2.5913924050632913, |
|
"grad_norm": 0.39369128640982165, |
|
"learning_rate": 2.212443486523819e-05, |
|
"loss": 0.1983, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 2.5954430379746833, |
|
"grad_norm": 0.4279312207509106, |
|
"learning_rate": 2.2067988825268243e-05, |
|
"loss": 0.1961, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 2.599493670886076, |
|
"grad_norm": 0.4614613321888851, |
|
"learning_rate": 2.2011526130000596e-05, |
|
"loss": 0.201, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 2.6035443037974684, |
|
"grad_norm": 0.3763844462362313, |
|
"learning_rate": 2.1955047234178038e-05, |
|
"loss": 0.2021, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 2.607594936708861, |
|
"grad_norm": 0.39695071010689176, |
|
"learning_rate": 2.1898552592673825e-05, |
|
"loss": 0.1986, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 2.6116455696202534, |
|
"grad_norm": 0.46112690630041053, |
|
"learning_rate": 2.184204266048803e-05, |
|
"loss": 0.2043, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 2.6156962025316455, |
|
"grad_norm": 0.4158943493149077, |
|
"learning_rate": 2.1785517892743887e-05, |
|
"loss": 0.2029, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 2.619746835443038, |
|
"grad_norm": 0.38356163141703364, |
|
"learning_rate": 2.17289787446841e-05, |
|
"loss": 0.2273, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 2.6237974683544305, |
|
"grad_norm": 0.4508732263349279, |
|
"learning_rate": 2.1672425671667198e-05, |
|
"loss": 0.2045, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 2.6278481012658226, |
|
"grad_norm": 0.4314831942972861, |
|
"learning_rate": 2.161585912916385e-05, |
|
"loss": 0.2011, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 2.631898734177215, |
|
"grad_norm": 0.37784401096739195, |
|
"learning_rate": 2.1559279572753214e-05, |
|
"loss": 0.1935, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 2.6359493670886076, |
|
"grad_norm": 0.5170777444216382, |
|
"learning_rate": 2.1502687458119268e-05, |
|
"loss": 0.2063, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 0.4268492908817238, |
|
"learning_rate": 2.1446083241047116e-05, |
|
"loss": 0.2221, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 2.6440506329113926, |
|
"grad_norm": 0.39736896304580416, |
|
"learning_rate": 2.1389467377419333e-05, |
|
"loss": 0.2185, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 2.6481012658227847, |
|
"grad_norm": 0.4956889530307039, |
|
"learning_rate": 2.133284032321232e-05, |
|
"loss": 0.2174, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 2.6521518987341772, |
|
"grad_norm": 0.42244657447799094, |
|
"learning_rate": 2.1276202534492566e-05, |
|
"loss": 0.2145, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 2.6562025316455697, |
|
"grad_norm": 0.382571955243392, |
|
"learning_rate": 2.121955446741306e-05, |
|
"loss": 0.2142, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 2.660253164556962, |
|
"grad_norm": 0.4070605337471735, |
|
"learning_rate": 2.1162896578209517e-05, |
|
"loss": 0.202, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 2.6643037974683543, |
|
"grad_norm": 0.38435158408121384, |
|
"learning_rate": 2.1106229323196813e-05, |
|
"loss": 0.1891, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 2.668354430379747, |
|
"grad_norm": 0.3974147779697317, |
|
"learning_rate": 2.1049553158765214e-05, |
|
"loss": 0.2046, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 2.6724050632911394, |
|
"grad_norm": 0.3902353426706327, |
|
"learning_rate": 2.0992868541376764e-05, |
|
"loss": 0.209, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 2.676455696202532, |
|
"grad_norm": 0.38995803519991934, |
|
"learning_rate": 2.093617592756158e-05, |
|
"loss": 0.2148, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 2.680506329113924, |
|
"grad_norm": 0.3824191560112279, |
|
"learning_rate": 2.0879475773914167e-05, |
|
"loss": 0.2251, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 2.6845569620253165, |
|
"grad_norm": 0.38814652370919234, |
|
"learning_rate": 2.082276853708978e-05, |
|
"loss": 0.2001, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 2.688607594936709, |
|
"grad_norm": 0.37485914502600515, |
|
"learning_rate": 2.076605467380071e-05, |
|
"loss": 0.2089, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 2.692658227848101, |
|
"grad_norm": 0.36171195847964166, |
|
"learning_rate": 2.0709334640812613e-05, |
|
"loss": 0.2006, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 2.6967088607594936, |
|
"grad_norm": 0.37521846791496466, |
|
"learning_rate": 2.0652608894940824e-05, |
|
"loss": 0.2054, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 2.700759493670886, |
|
"grad_norm": 0.4037332299953059, |
|
"learning_rate": 2.0595877893046722e-05, |
|
"loss": 0.2259, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 2.7048101265822786, |
|
"grad_norm": 0.3673048116602619, |
|
"learning_rate": 2.0539142092033985e-05, |
|
"loss": 0.2103, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 2.708860759493671, |
|
"grad_norm": 0.41123827576369887, |
|
"learning_rate": 2.048240194884496e-05, |
|
"loss": 0.2169, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 2.712911392405063, |
|
"grad_norm": 0.3833081357215822, |
|
"learning_rate": 2.042565792045695e-05, |
|
"loss": 0.2085, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 2.7169620253164557, |
|
"grad_norm": 0.41392801611609664, |
|
"learning_rate": 2.036891046387857e-05, |
|
"loss": 0.2018, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 2.721012658227848, |
|
"grad_norm": 0.40183764762732815, |
|
"learning_rate": 2.0312160036146036e-05, |
|
"loss": 0.2083, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 2.7250632911392403, |
|
"grad_norm": 0.3784878421340116, |
|
"learning_rate": 2.025540709431948e-05, |
|
"loss": 0.2003, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 2.729113924050633, |
|
"grad_norm": 0.42322049549031965, |
|
"learning_rate": 2.0198652095479298e-05, |
|
"loss": 0.2058, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 2.7331645569620253, |
|
"grad_norm": 0.3950708876306594, |
|
"learning_rate": 2.014189549672245e-05, |
|
"loss": 0.2063, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 2.737215189873418, |
|
"grad_norm": 0.40264872206804897, |
|
"learning_rate": 2.0085137755158776e-05, |
|
"loss": 0.2076, |
|
"step": 675 |
|
},
{ "epoch": 2.7412658227848103, "grad_norm": 0.37399076968525824, "learning_rate": 2.0028379327907327e-05, "loss": 0.2007, "step": 676 },
{ "epoch": 2.7453164556962024, "grad_norm": 0.3955450520532515, "learning_rate": 1.9971620672092676e-05, "loss": 0.2196, "step": 677 },
{ "epoch": 2.749367088607595, "grad_norm": 0.3743193481182695, "learning_rate": 1.991486224484123e-05, "loss": 0.2059, "step": 678 },
{ "epoch": 2.7534177215189874, "grad_norm": 0.3668561549393674, "learning_rate": 1.985810450327756e-05, "loss": 0.2003, "step": 679 },
{ "epoch": 2.7574683544303795, "grad_norm": 0.3841876935364412, "learning_rate": 1.9801347904520706e-05, "loss": 0.2011, "step": 680 },
{ "epoch": 2.761518987341772, "grad_norm": 0.3609622265656133, "learning_rate": 1.974459290568053e-05, "loss": 0.2029, "step": 681 },
{ "epoch": 2.7655696202531646, "grad_norm": 0.3882645046417306, "learning_rate": 1.968783996385397e-05, "loss": 0.2091, "step": 682 },
{ "epoch": 2.769620253164557, "grad_norm": 0.37512206066452086, "learning_rate": 1.963108953612143e-05, "loss": 0.1958, "step": 683 },
{ "epoch": 2.7736708860759496, "grad_norm": 0.3973447146047277, "learning_rate": 1.9574342079543056e-05, "loss": 0.1948, "step": 684 },
{ "epoch": 2.7777215189873417, "grad_norm": 0.3850473826033356, "learning_rate": 1.9517598051155046e-05, "loss": 0.2028, "step": 685 },
{ "epoch": 2.781772151898734, "grad_norm": 0.4215504036792543, "learning_rate": 1.9460857907966025e-05, "loss": 0.2043, "step": 686 },
{ "epoch": 2.7858227848101267, "grad_norm": 0.38833621409452973, "learning_rate": 1.9404122106953285e-05, "loss": 0.2015, "step": 687 },
{ "epoch": 2.7898734177215188, "grad_norm": 0.3772923960998295, "learning_rate": 1.9347391105059176e-05, "loss": 0.2194, "step": 688 },
{ "epoch": 2.7939240506329113, "grad_norm": 0.3688253377119613, "learning_rate": 1.92906653591874e-05, "loss": 0.1989, "step": 689 },
{ "epoch": 2.797974683544304, "grad_norm": 0.3672802177659729, "learning_rate": 1.9233945326199295e-05, "loss": 0.2079, "step": 690 },
{ "epoch": 2.8020253164556963, "grad_norm": 0.3805330263401076, "learning_rate": 1.917723146291022e-05, "loss": 0.2099, "step": 691 },
{ "epoch": 2.806075949367089, "grad_norm": 0.3924512566520626, "learning_rate": 1.912052422608584e-05, "loss": 0.205, "step": 692 },
{ "epoch": 2.810126582278481, "grad_norm": 0.3510771418394491, "learning_rate": 1.9063824072438428e-05, "loss": 0.2062, "step": 693 },
{ "epoch": 2.8141772151898734, "grad_norm": 0.4252905843296976, "learning_rate": 1.9007131458623246e-05, "loss": 0.2082, "step": 694 },
{ "epoch": 2.818227848101266, "grad_norm": 0.36860217716392724, "learning_rate": 1.895044684123479e-05, "loss": 0.2207, "step": 695 },
{ "epoch": 2.822278481012658, "grad_norm": 0.3762876464785527, "learning_rate": 1.8893770676803194e-05, "loss": 0.2103, "step": 696 },
{ "epoch": 2.8263291139240505, "grad_norm": 0.40971414152642255, "learning_rate": 1.8837103421790486e-05, "loss": 0.1985, "step": 697 },
{ "epoch": 2.830379746835443, "grad_norm": 0.3765541129822229, "learning_rate": 1.8780445532586952e-05, "loss": 0.2107, "step": 698 },
{ "epoch": 2.8344303797468355, "grad_norm": 0.37759115111268804, "learning_rate": 1.872379746550743e-05, "loss": 0.1982, "step": 699 },
{ "epoch": 2.838481012658228, "grad_norm": 0.37572012191958887, "learning_rate": 1.866715967678769e-05, "loss": 0.2151, "step": 700 },
{ "epoch": 2.84253164556962, "grad_norm": 0.3681377999868957, "learning_rate": 1.861053262258067e-05, "loss": 0.1998, "step": 701 },
{ "epoch": 2.8465822784810126, "grad_norm": 0.44446810521506797, "learning_rate": 1.8553916758952897e-05, "loss": 0.1989, "step": 702 },
{ "epoch": 2.850632911392405, "grad_norm": 0.360767790551225, "learning_rate": 1.8497312541880735e-05, "loss": 0.2004, "step": 703 },
{ "epoch": 2.8546835443037972, "grad_norm": 0.4349456939555442, "learning_rate": 1.8440720427246786e-05, "loss": 0.2036, "step": 704 },
{ "epoch": 2.8587341772151897, "grad_norm": 0.41294322140218026, "learning_rate": 1.8384140870836157e-05, "loss": 0.2037, "step": 705 },
{ "epoch": 2.8627848101265823, "grad_norm": 0.3665080509669357, "learning_rate": 1.8327574328332806e-05, "loss": 0.1997, "step": 706 },
{ "epoch": 2.8668354430379748, "grad_norm": 0.42756575406132435, "learning_rate": 1.8271021255315906e-05, "loss": 0.2184, "step": 707 },
{ "epoch": 2.8708860759493673, "grad_norm": 0.37727204040773576, "learning_rate": 1.8214482107256117e-05, "loss": 0.1936, "step": 708 },
{ "epoch": 2.8749367088607594, "grad_norm": 0.36773434037576863, "learning_rate": 1.8157957339511968e-05, "loss": 0.1931, "step": 709 },
{ "epoch": 2.878987341772152, "grad_norm": 0.4018047747137806, "learning_rate": 1.8101447407326182e-05, "loss": 0.216, "step": 710 },
{ "epoch": 2.8830379746835444, "grad_norm": 0.38008340638177746, "learning_rate": 1.8044952765821966e-05, "loss": 0.2055, "step": 711 },
{ "epoch": 2.8870886075949365, "grad_norm": 0.41302175541639724, "learning_rate": 1.7988473869999407e-05, "loss": 0.2183, "step": 712 },
{ "epoch": 2.891139240506329, "grad_norm": 0.37247251297042033, "learning_rate": 1.7932011174731764e-05, "loss": 0.2136, "step": 713 },
{ "epoch": 2.8951898734177215, "grad_norm": 0.3706994058682389, "learning_rate": 1.7875565134761817e-05, "loss": 0.2225, "step": 714 },
{ "epoch": 2.899240506329114, "grad_norm": 0.40772209756804834, "learning_rate": 1.7819136204698226e-05, "loss": 0.2052, "step": 715 },
{ "epoch": 2.9032911392405065, "grad_norm": 0.3780592018739491, "learning_rate": 1.776272483901182e-05, "loss": 0.2044, "step": 716 },
{ "epoch": 2.9073417721518986, "grad_norm": 0.35509864409828873, "learning_rate": 1.7706331492031995e-05, "loss": 0.2089, "step": 717 },
{ "epoch": 2.911392405063291, "grad_norm": 0.3742693943684576, "learning_rate": 1.764995661794301e-05, "loss": 0.2233, "step": 718 },
{ "epoch": 2.9154430379746836, "grad_norm": 0.42060993075969155, "learning_rate": 1.759360067078035e-05, "loss": 0.1869, "step": 719 },
{ "epoch": 2.9194936708860757, "grad_norm": 0.39192870298914495, "learning_rate": 1.7537264104427064e-05, "loss": 0.1967, "step": 720 },
{ "epoch": 2.923544303797468, "grad_norm": 0.38274384043646287, "learning_rate": 1.748094737261012e-05, "loss": 0.2112, "step": 721 },
{ "epoch": 2.9275949367088607, "grad_norm": 0.4136391115345298, "learning_rate": 1.7424650928896726e-05, "loss": 0.1938, "step": 722 },
{ "epoch": 2.9316455696202532, "grad_norm": 0.3991861880632269, "learning_rate": 1.7368375226690712e-05, "loss": 0.2089, "step": 723 },
{ "epoch": 2.9356962025316458, "grad_norm": 0.37389595108941354, "learning_rate": 1.731212071922883e-05, "loss": 0.1983, "step": 724 },
{ "epoch": 2.939746835443038, "grad_norm": 0.3623186365670296, "learning_rate": 1.7255887859577156e-05, "loss": 0.2122, "step": 725 },
{ "epoch": 2.9437974683544303, "grad_norm": 0.3463479111675089, "learning_rate": 1.7199677100627427e-05, "loss": 0.2029, "step": 726 },
{ "epoch": 2.947848101265823, "grad_norm": 0.374321930534172, "learning_rate": 1.7143488895093343e-05, "loss": 0.2053, "step": 727 },
{ "epoch": 2.951898734177215, "grad_norm": 0.3371281854120934, "learning_rate": 1.7087323695506994e-05, "loss": 0.2161, "step": 728 },
{ "epoch": 2.9559493670886074, "grad_norm": 0.3480283098439618, "learning_rate": 1.7031181954215194e-05, "loss": 0.2163, "step": 729 },
{ "epoch": 2.96, "grad_norm": 0.3759684879652346, "learning_rate": 1.6975064123375788e-05, "loss": 0.201, "step": 730 },
{ "epoch": 2.9640506329113925, "grad_norm": 0.3504840478946368, "learning_rate": 1.6918970654954084e-05, "loss": 0.2046, "step": 731 },
{ "epoch": 2.968101265822785, "grad_norm": 0.3670767693688908, "learning_rate": 1.686290200071915e-05, "loss": 0.1831, "step": 732 },
{ "epoch": 2.972151898734177, "grad_norm": 0.3943038353023971, "learning_rate": 1.6806858612240234e-05, "loss": 0.2198, "step": 733 },
{ "epoch": 2.9762025316455696, "grad_norm": 0.3696205681523504, "learning_rate": 1.6750840940883078e-05, "loss": 0.2053, "step": 734 },
{ "epoch": 2.980253164556962, "grad_norm": 0.3884089001060854, "learning_rate": 1.6694849437806305e-05, "loss": 0.2085, "step": 735 },
{ "epoch": 2.984303797468354, "grad_norm": 0.37287310823795344, "learning_rate": 1.663888455395778e-05, "loss": 0.1955, "step": 736 },
{ "epoch": 2.9883544303797467, "grad_norm": 0.38121893540702634, "learning_rate": 1.6582946740070995e-05, "loss": 0.2137, "step": 737 },
{ "epoch": 2.992405063291139, "grad_norm": 0.36101130429918893, "learning_rate": 1.6527036446661396e-05, "loss": 0.2032, "step": 738 },
{ "epoch": 2.9964556962025317, "grad_norm": 0.35923248816105424, "learning_rate": 1.6471154124022818e-05, "loss": 0.2122, "step": 739 },
{ "epoch": 3.002025316455696, "grad_norm": 0.3983906468269196, "learning_rate": 1.6415300222223788e-05, "loss": 0.169, "step": 740 },
{ "epoch": 3.0060759493670886, "grad_norm": 0.4244553959859824, "learning_rate": 1.6359475191103958e-05, "loss": 0.13, "step": 741 },
{ "epoch": 3.010126582278481, "grad_norm": 0.3362571162741139, "learning_rate": 1.6303679480270466e-05, "loss": 0.1185, "step": 742 },
{ "epoch": 3.0141772151898736, "grad_norm": 0.4442508566228957, "learning_rate": 1.624791353909428e-05, "loss": 0.1332, "step": 743 },
{ "epoch": 3.0182278481012657, "grad_norm": 0.5356167020535704, "learning_rate": 1.619217781670663e-05, "loss": 0.1202, "step": 744 },
{ "epoch": 3.022278481012658, "grad_norm": 0.44668912508938813, "learning_rate": 1.6136472761995373e-05, "loss": 0.125, "step": 745 },
{ "epoch": 3.0263291139240507, "grad_norm": 0.3747480438630673, "learning_rate": 1.608079882360134e-05, "loss": 0.1212, "step": 746 },
{ "epoch": 3.030379746835443, "grad_norm": 0.41234001825964167, "learning_rate": 1.60251564499148e-05, "loss": 0.1294, "step": 747 },
{ "epoch": 3.0344303797468353, "grad_norm": 0.40640441979643455, "learning_rate": 1.596954608907176e-05, "loss": 0.1291, "step": 748 },
{ "epoch": 3.038481012658228, "grad_norm": 0.39046487536768865, "learning_rate": 1.591396818895043e-05, "loss": 0.1282, "step": 749 },
{ "epoch": 3.0425316455696203, "grad_norm": 0.3636931802073635, "learning_rate": 1.585842319716759e-05, "loss": 0.1286, "step": 750 },
{ "epoch": 3.046582278481013, "grad_norm": 0.3706754748677524, "learning_rate": 1.5802911561074944e-05, "loss": 0.1147, "step": 751 },
{ "epoch": 3.050632911392405, "grad_norm": 0.36072805782139533, "learning_rate": 1.5747433727755595e-05, "loss": 0.109, "step": 752 },
{ "epoch": 3.0546835443037974, "grad_norm": 0.4225498792827284, "learning_rate": 1.5691990144020376e-05, "loss": 0.1299, "step": 753 },
{ "epoch": 3.05873417721519, "grad_norm": 0.3935594380555468, "learning_rate": 1.5636581256404297e-05, "loss": 0.1194, "step": 754 },
{ "epoch": 3.0627848101265824, "grad_norm": 0.3359406811680265, "learning_rate": 1.558120751116291e-05, "loss": 0.1169, "step": 755 },
{ "epoch": 3.0668354430379745, "grad_norm": 0.3635940631965265, "learning_rate": 1.552586935426876e-05, "loss": 0.1294, "step": 756 },
{ "epoch": 3.070886075949367, "grad_norm": 0.3759790778104714, "learning_rate": 1.547056723140774e-05, "loss": 0.1343, "step": 757 },
{ "epoch": 3.0749367088607595, "grad_norm": 0.3453833944114269, "learning_rate": 1.5415301587975565e-05, "loss": 0.1423, "step": 758 },
{ "epoch": 3.078987341772152, "grad_norm": 0.34898582143355944, "learning_rate": 1.536007286907411e-05, "loss": 0.114, "step": 759 },
{ "epoch": 3.083037974683544, "grad_norm": 0.3518947362932589, "learning_rate": 1.5304881519507896e-05, "loss": 0.1201, "step": 760 },
{ "epoch": 3.0870886075949366, "grad_norm": 0.33976735739067165, "learning_rate": 1.5249727983780453e-05, "loss": 0.116, "step": 761 },
{ "epoch": 3.091139240506329, "grad_norm": 0.3523310594347651, "learning_rate": 1.5194612706090786e-05, "loss": 0.1294, "step": 762 },
{ "epoch": 3.0951898734177217, "grad_norm": 0.3473435788402965, "learning_rate": 1.5139536130329771e-05, "loss": 0.1192, "step": 763 },
{ "epoch": 3.0992405063291137, "grad_norm": 0.36848432258161273, "learning_rate": 1.508449870007656e-05, "loss": 0.1381, "step": 764 },
{ "epoch": 3.1032911392405063, "grad_norm": 0.3524790706420217, "learning_rate": 1.5029500858595056e-05, "loss": 0.1373, "step": 765 },
{ "epoch": 3.1073417721518988, "grad_norm": 0.34288409804791886, "learning_rate": 1.4974543048830328e-05, "loss": 0.1347, "step": 766 },
{ "epoch": 3.1113924050632913, "grad_norm": 0.3566163096067318, "learning_rate": 1.4919625713405e-05, "loss": 0.1364, "step": 767 },
{ "epoch": 3.1154430379746834, "grad_norm": 0.37604754572428545, "learning_rate": 1.4864749294615756e-05, "loss": 0.1267, "step": 768 },
{ "epoch": 3.119493670886076, "grad_norm": 0.34010876050110506, "learning_rate": 1.4809914234429716e-05, "loss": 0.1239, "step": 769 },
{ "epoch": 3.1235443037974684, "grad_norm": 0.34661690997016936, "learning_rate": 1.4755120974480923e-05, "loss": 0.1327, "step": 770 },
{ "epoch": 3.127594936708861, "grad_norm": 0.33560936100241373, "learning_rate": 1.4700369956066771e-05, "loss": 0.1313, "step": 771 },
{ "epoch": 3.131645569620253, "grad_norm": 0.34236652164681225, "learning_rate": 1.4645661620144413e-05, "loss": 0.1302, "step": 772 },
{ "epoch": 3.1356962025316455, "grad_norm": 0.3463383646935167, "learning_rate": 1.4590996407327284e-05, "loss": 0.1233, "step": 773 },
{ "epoch": 3.139746835443038, "grad_norm": 0.35194777818328293, "learning_rate": 1.4536374757881487e-05, "loss": 0.1158, "step": 774 },
{ "epoch": 3.1437974683544305, "grad_norm": 0.33571504708438993, "learning_rate": 1.4481797111722271e-05, "loss": 0.1142, "step": 775 },
{ "epoch": 3.1478481012658226, "grad_norm": 0.34218753489771236, "learning_rate": 1.4427263908410507e-05, "loss": 0.1138, "step": 776 },
{ "epoch": 3.151898734177215, "grad_norm": 0.3528904994791644, "learning_rate": 1.4372775587149108e-05, "loss": 0.1254, "step": 777 },
{ "epoch": 3.1559493670886076, "grad_norm": 0.32356574557346346, "learning_rate": 1.4318332586779522e-05, "loss": 0.134, "step": 778 },
{ "epoch": 3.16, "grad_norm": 0.3307107139186243, "learning_rate": 1.4263935345778202e-05, "loss": 0.1415, "step": 779 },
{ "epoch": 3.164050632911392, "grad_norm": 0.3362812271613725, "learning_rate": 1.420958430225303e-05, "loss": 0.1254, "step": 780 },
{ "epoch": 3.1681012658227847, "grad_norm": 0.3347560598105406, "learning_rate": 1.415527989393985e-05, "loss": 0.1226, "step": 781 },
{ "epoch": 3.1721518987341772, "grad_norm": 0.32744365457350305, "learning_rate": 1.410102255819891e-05, "loss": 0.1212, "step": 782 },
{ "epoch": 3.1762025316455698, "grad_norm": 0.3237563815117898, "learning_rate": 1.404681273201131e-05, "loss": 0.1308, "step": 783 },
{ "epoch": 3.180253164556962, "grad_norm": 0.33978014117090144, "learning_rate": 1.399265085197556e-05, "loss": 0.1265, "step": 784 },
{ "epoch": 3.1843037974683543, "grad_norm": 0.3356530742811181, "learning_rate": 1.393853735430398e-05, "loss": 0.1185, "step": 785 },
{ "epoch": 3.188354430379747, "grad_norm": 0.3380684160071456, "learning_rate": 1.3884472674819246e-05, "loss": 0.1167, "step": 786 },
{ "epoch": 3.1924050632911394, "grad_norm": 0.32895214615621016, "learning_rate": 1.3830457248950864e-05, "loss": 0.1318, "step": 787 },
{ "epoch": 3.1964556962025314, "grad_norm": 0.33298570921454923, "learning_rate": 1.377649151173163e-05, "loss": 0.1216, "step": 788 },
{ "epoch": 3.200506329113924, "grad_norm": 0.33009909775199414, "learning_rate": 1.3722575897794181e-05, "loss": 0.1231, "step": 789 },
{ "epoch": 3.2045569620253165, "grad_norm": 0.32407989631670525, "learning_rate": 1.3668710841367472e-05, "loss": 0.1194, "step": 790 },
{ "epoch": 3.208607594936709, "grad_norm": 0.32515859439215766, "learning_rate": 1.361489677627324e-05, "loss": 0.1142, "step": 791 },
{ "epoch": 3.212658227848101, "grad_norm": 0.33597660405031493, "learning_rate": 1.3561134135922585e-05, "loss": 0.1357, "step": 792 },
{ "epoch": 3.2167088607594936, "grad_norm": 0.32418031909821465, "learning_rate": 1.350742335331241e-05, "loss": 0.1353, "step": 793 },
{ "epoch": 3.220759493670886, "grad_norm": 0.32849723364411487, "learning_rate": 1.345376486102198e-05, "loss": 0.1285, "step": 794 },
{ "epoch": 3.2248101265822786, "grad_norm": 0.3512004803787895, "learning_rate": 1.3400159091209414e-05, "loss": 0.1224, "step": 795 },
{ "epoch": 3.2288607594936707, "grad_norm": 0.34005087243453785, "learning_rate": 1.3346606475608216e-05, "loss": 0.1273, "step": 796 },
{ "epoch": 3.232911392405063, "grad_norm": 0.3466166284067475, "learning_rate": 1.3293107445523781e-05, "loss": 0.133, "step": 797 },
{ "epoch": 3.2369620253164557, "grad_norm": 0.33180522772508275, "learning_rate": 1.3239662431829949e-05, "loss": 0.1247, "step": 798 },
{ "epoch": 3.2410126582278482, "grad_norm": 0.35182947781966445, "learning_rate": 1.3186271864965509e-05, "loss": 0.136, "step": 799 },
{ "epoch": 3.2450632911392403, "grad_norm": 0.3187674084964833, "learning_rate": 1.3132936174930756e-05, "loss": 0.1216, "step": 800 },
{ "epoch": 3.249113924050633, "grad_norm": 0.32971165152141907, "learning_rate": 1.3079655791283995e-05, "loss": 0.1349, "step": 801 },
{ "epoch": 3.2531645569620253, "grad_norm": 0.3289707374141855, "learning_rate": 1.3026431143138108e-05, "loss": 0.125, "step": 802 },
{ "epoch": 3.257215189873418, "grad_norm": 0.32636662190371885, "learning_rate": 1.2973262659157114e-05, "loss": 0.1187, "step": 803 },
{ "epoch": 3.26126582278481, "grad_norm": 0.3356770558832485, "learning_rate": 1.2920150767552651e-05, "loss": 0.1401, "step": 804 },
{ "epoch": 3.2653164556962024, "grad_norm": 0.31734500755492734, "learning_rate": 1.2867095896080607e-05, "loss": 0.1288, "step": 805 },
{ "epoch": 3.269367088607595, "grad_norm": 0.33353980186093246, "learning_rate": 1.2814098472037612e-05, "loss": 0.1411, "step": 806 },
{ "epoch": 3.2734177215189875, "grad_norm": 0.3225180362638247, "learning_rate": 1.276115892225764e-05, "loss": 0.1234, "step": 807 },
{ "epoch": 3.27746835443038, "grad_norm": 0.33442918367203817, "learning_rate": 1.2708277673108555e-05, "loss": 0.1309, "step": 808 },
{ "epoch": 3.281518987341772, "grad_norm": 0.32833523292596, "learning_rate": 1.2655455150488649e-05, "loss": 0.1319, "step": 809 },
{ "epoch": 3.2855696202531646, "grad_norm": 0.3241478844091838, "learning_rate": 1.2602691779823272e-05, "loss": 0.1256, "step": 810 },
{ "epoch": 3.289620253164557, "grad_norm": 0.3308718987968567, "learning_rate": 1.2549987986061355e-05, "loss": 0.1213, "step": 811 },
{ "epoch": 3.293670886075949, "grad_norm": 0.32520286235485185, "learning_rate": 1.2497344193672005e-05, "loss": 0.1066, "step": 812 },
{ "epoch": 3.2977215189873417, "grad_norm": 0.32214167995990156, "learning_rate": 1.2444760826641092e-05, "loss": 0.1139, "step": 813 },
{ "epoch": 3.301772151898734, "grad_norm": 0.31907374063824673, "learning_rate": 1.2392238308467817e-05, "loss": 0.1269, "step": 814 },
{ "epoch": 3.3058227848101267, "grad_norm": 0.32338490621448773, "learning_rate": 1.2339777062161326e-05, "loss": 0.1123, "step": 815 },
{ "epoch": 3.309873417721519, "grad_norm": 0.3377998763973875, "learning_rate": 1.2287377510237293e-05, "loss": 0.12, "step": 816 },
{ "epoch": 3.3139240506329113, "grad_norm": 0.33011284654195994, "learning_rate": 1.2235040074714488e-05, "loss": 0.1273, "step": 817 },
{ "epoch": 3.317974683544304, "grad_norm": 0.33717577667107645, "learning_rate": 1.2182765177111434e-05, "loss": 0.1155, "step": 818 },
{ "epoch": 3.3220253164556963, "grad_norm": 0.33180549522475455, "learning_rate": 1.213055323844297e-05, "loss": 0.1154, "step": 819 },
{ "epoch": 3.3260759493670884, "grad_norm": 0.33129864313945034, "learning_rate": 1.2078404679216864e-05, "loss": 0.1242, "step": 820 },
{ "epoch": 3.330126582278481, "grad_norm": 0.3149929980291516, "learning_rate": 1.2026319919430458e-05, "loss": 0.1295, "step": 821 },
{ "epoch": 3.3341772151898734, "grad_norm": 0.3258321652045859, "learning_rate": 1.1974299378567227e-05, "loss": 0.1274, "step": 822 },
{ "epoch": 3.338227848101266, "grad_norm": 0.34516470750877165, "learning_rate": 1.1922343475593462e-05, "loss": 0.1172, "step": 823 },
{ "epoch": 3.3422784810126585, "grad_norm": 0.3309854313372272, "learning_rate": 1.187045262895488e-05, "loss": 0.1339, "step": 824 },
{ "epoch": 3.3463291139240505, "grad_norm": 0.33243602209341916, "learning_rate": 1.1818627256573203e-05, "loss": 0.1267, "step": 825 },
{ "epoch": 3.350379746835443, "grad_norm": 0.3369445394181662, "learning_rate": 1.1766867775842864e-05, "loss": 0.1223, "step": 826 },
{ "epoch": 3.3544303797468356, "grad_norm": 0.34349845635717385, "learning_rate": 1.1715174603627615e-05, "loss": 0.117, "step": 827 },
{ "epoch": 3.3584810126582276, "grad_norm": 0.33611039278856275, "learning_rate": 1.1663548156257147e-05, "loss": 0.124, "step": 828 },
{ "epoch": 3.36253164556962, "grad_norm": 0.3288908926485652, "learning_rate": 1.161198884952377e-05, "loss": 0.1213, "step": 829 },
{ "epoch": 3.3665822784810127, "grad_norm": 0.3342643365243101, "learning_rate": 1.1560497098679056e-05, "loss": 0.1149, "step": 830 },
{ "epoch": 3.370632911392405, "grad_norm": 0.35773526134533457, "learning_rate": 1.1509073318430479e-05, "loss": 0.1253, "step": 831 },
{ "epoch": 3.3746835443037977, "grad_norm": 0.3437638463335302, "learning_rate": 1.1457717922938116e-05, "loss": 0.1218, "step": 832 },
{ "epoch": 3.3787341772151898, "grad_norm": 0.3297706003967539, "learning_rate": 1.1406431325811233e-05, "loss": 0.1318, "step": 833 },
{ "epoch": 3.3827848101265823, "grad_norm": 0.3318182566333596, "learning_rate": 1.135521394010506e-05, "loss": 0.1179, "step": 834 },
{ "epoch": 3.386835443037975, "grad_norm": 0.37776339607440423, "learning_rate": 1.1304066178317367e-05, "loss": 0.1246, "step": 835 },
{ "epoch": 3.390886075949367, "grad_norm": 0.33566107323016675, "learning_rate": 1.1252988452385199e-05, "loss": 0.1433, "step": 836 },
{ "epoch": 3.3949367088607594, "grad_norm": 0.32480430446343067, "learning_rate": 1.1201981173681536e-05, "loss": 0.1203, "step": 837 },
{ "epoch": 3.398987341772152, "grad_norm": 0.35670357346344483, "learning_rate": 1.1151044753011991e-05, "loss": 0.1301, "step": 838 },
{ "epoch": 3.4030379746835444, "grad_norm": 0.35477983591006323, "learning_rate": 1.1100179600611491e-05, "loss": 0.14, "step": 839 },
{ "epoch": 3.407088607594937, "grad_norm": 0.33027284329950846, "learning_rate": 1.1049386126140985e-05, "loss": 0.1262, "step": 840 },
{ "epoch": 3.411139240506329, "grad_norm": 0.34458546160406217, "learning_rate": 1.0998664738684128e-05, "loss": 0.1273, "step": 841 },
{ "epoch": 3.4151898734177215, "grad_norm": 0.3618314106985589, "learning_rate": 1.0948015846744e-05, "loss": 0.1166, "step": 842 },
{ "epoch": 3.419240506329114, "grad_norm": 0.3225032874261283, "learning_rate": 1.0897439858239832e-05, "loss": 0.1212, "step": 843 },
{ "epoch": 3.423291139240506, "grad_norm": 0.3534439429138387, "learning_rate": 1.0846937180503652e-05, "loss": 0.1245, "step": 844 },
{ "epoch": 3.4273417721518986, "grad_norm": 0.364674215218473, "learning_rate": 1.0796508220277117e-05, "loss": 0.1368, "step": 845 },
{ "epoch": 3.431392405063291, "grad_norm": 0.33277192960783664, "learning_rate": 1.0746153383708107e-05, "loss": 0.1283, "step": 846 },
{ "epoch": 3.4354430379746836, "grad_norm": 0.33003101795537954, "learning_rate": 1.0695873076347579e-05, "loss": 0.1294, "step": 847 },
{ "epoch": 3.439493670886076, "grad_norm": 0.32698246462993796, "learning_rate": 1.0645667703146205e-05, "loss": 0.1304, "step": 848 },
{ "epoch": 3.4435443037974682, "grad_norm": 0.33254760107489306, "learning_rate": 1.0595537668451161e-05, "loss": 0.1233, "step": 849 },
{ "epoch": 3.4475949367088607, "grad_norm": 0.3228142629083158, "learning_rate": 1.0545483376002854e-05, "loss": 0.137, "step": 850 },
{ "epoch": 3.4516455696202533, "grad_norm": 0.34469921482283067, "learning_rate": 1.0495505228931676e-05, "loss": 0.1305, "step": 851 },
{ "epoch": 3.4556962025316453, "grad_norm": 0.3137526118672114, "learning_rate": 1.044560362975474e-05, "loss": 0.1087, "step": 852 },
{ "epoch": 3.459746835443038, "grad_norm": 0.3365059801826103, "learning_rate": 1.0395778980372695e-05, "loss": 0.1201, "step": 853 },
{ "epoch": 3.4637974683544304, "grad_norm": 0.32864092125954797, "learning_rate": 1.0346031682066381e-05, "loss": 0.1222, "step": 854 },
{ "epoch": 3.467848101265823, "grad_norm": 0.333858825737718, "learning_rate": 1.0296362135493724e-05, "loss": 0.1129, "step": 855 },
{ "epoch": 3.4718987341772154, "grad_norm": 0.3296036770400852, "learning_rate": 1.0246770740686422e-05, "loss": 0.1239, "step": 856 },
{ "epoch": 3.4759493670886075, "grad_norm": 0.3131648063156865, "learning_rate": 1.0197257897046743e-05, "loss": 0.1144, "step": 857 },
{ "epoch": 3.48, "grad_norm": 0.32313721631266046, "learning_rate": 1.014782400334433e-05, "loss": 0.1125, "step": 858 },
{ "epoch": 3.4840506329113925, "grad_norm": 0.35063963431076955, "learning_rate": 1.009846945771296e-05, "loss": 0.1296, "step": 859 },
{ "epoch": 3.4881012658227846, "grad_norm": 0.32579859367772007, "learning_rate": 1.0049194657647363e-05, "loss": 0.1152, "step": 860 },
{ "epoch": 3.492151898734177, "grad_norm": 0.31912004876753747, "learning_rate": 1.0000000000000006e-05, "loss": 0.1312, "step": 861 },
{ "epoch": 3.4962025316455696, "grad_norm": 0.3294786248361997, "learning_rate": 9.950885880977891e-06, "loss": 0.1316, "step": 862 },
{ "epoch": 3.500253164556962, "grad_norm": 0.30787000188276176, "learning_rate": 9.901852696139382e-06, "loss": 0.136, "step": 863 },
{ "epoch": 3.5043037974683546, "grad_norm": 0.32368848647959375, "learning_rate": 9.852900840391027e-06, "loss": 0.1231, "step": 864 },
{ "epoch": 3.5083544303797467, "grad_norm": 0.3307484271264743, "learning_rate": 9.804030707984313e-06, "loss": 0.1254, "step": 865 },
{ "epoch": 3.512405063291139, "grad_norm": 0.32300511527213077, "learning_rate": 9.755242692512599e-06, "loss": 0.1193, "step": 866 },
{ "epoch": 3.5164556962025317, "grad_norm": 0.32140858578433174, "learning_rate": 9.70653718690782e-06, "loss": 0.1175, "step": 867 },
{ "epoch": 3.520506329113924, "grad_norm": 0.3241598303977128, "learning_rate": 9.657914583437454e-06, "loss": 0.1248, "step": 868 },
{ "epoch": 3.5245569620253163, "grad_norm": 0.30781711893225583, "learning_rate": 9.609375273701246e-06, "loss": 0.1182, "step": 869 },
{ "epoch": 3.528607594936709, "grad_norm": 0.3358961314140347, "learning_rate": 9.560919648628133e-06, "loss": 0.1362, "step": 870 },
{ "epoch": 3.5326582278481014, "grad_norm": 0.31484504461476326, "learning_rate": 9.512548098473047e-06, "loss": 0.1324, "step": 871 },
{ "epoch": 3.536708860759494, "grad_norm": 0.31293289122421375, "learning_rate": 9.464261012813825e-06, "loss": 0.114, "step": 872 },
{ "epoch": 3.540759493670886, "grad_norm": 0.31050129203696275, "learning_rate": 9.416058780547987e-06, "loss": 0.1237, "step": 873 },
{ "epoch": 3.5448101265822785, "grad_norm": 0.3168747343162724, "learning_rate": 9.367941789889714e-06, "loss": 0.1276, "step": 874 },
{ "epoch": 3.548860759493671, "grad_norm": 0.3260028482599908, "learning_rate": 9.319910428366607e-06, "loss": 0.121, "step": 875 },
{ "epoch": 3.552911392405063, "grad_norm": 0.3179045474178272, "learning_rate": 9.271965082816667e-06, "loss": 0.1285, "step": 876 },
{ "epoch": 3.5569620253164556, "grad_norm": 0.31548460139980355, "learning_rate": 9.224106139385111e-06, "loss": 0.116, "step": 877 },
{ "epoch": 3.561012658227848, "grad_norm": 0.32019716520551045, "learning_rate": 9.176333983521291e-06, "loss": 0.1207, "step": 878 },
{ "epoch": 3.5650632911392406, "grad_norm": 0.3308125346715367, "learning_rate": 9.12864899997558e-06, "loss": 0.1236, "step": 879 },
{ "epoch": 3.569113924050633, "grad_norm": 0.32171031094464997, "learning_rate": 9.08105157279628e-06, "loss": 0.1297, "step": 880 },
{ "epoch": 3.573164556962025, "grad_norm": 0.3120988060708379, "learning_rate": 9.03354208532653e-06, "loss": 0.1205, "step": 881 },
{ "epoch": 3.5772151898734177, "grad_norm": 0.33579746167453417, "learning_rate": 8.986120920201205e-06, "loss": 0.1177, "step": 882 },
{ "epoch": 3.58126582278481, "grad_norm": 0.3213026350312677, "learning_rate": 8.938788459343852e-06, "loss": 0.1088, "step": 883 },
{ "epoch": 3.5853164556962023, "grad_norm": 0.3293624519574336, "learning_rate": 8.8915450839636e-06, "loss": 0.1366, "step": 884 },
{ "epoch": 3.589367088607595, "grad_norm": 0.31966287433493196, "learning_rate": 8.844391174552116e-06, "loss": 0.1349, "step": 885 },
{ "epoch": 3.5934177215189873, "grad_norm": 0.3275789400133564, "learning_rate": 8.797327110880479e-06, "loss": 0.1133, "step": 886 },
{ "epoch": 3.59746835443038, "grad_norm": 0.323582523740489, "learning_rate": 8.750353271996206e-06, "loss": 0.1248, "step": 887 },
{ "epoch": 3.6015189873417723, "grad_norm": 0.30562322651262236, "learning_rate": 8.703470036220132e-06, "loss": 0.1205, "step": 888 },
{ "epoch": 3.6055696202531644, "grad_norm": 0.3423022247344568, "learning_rate": 8.656677781143394e-06, "loss": 0.1176, "step": 889 },
{ "epoch": 3.609620253164557, "grad_norm": 0.3207918941657794, "learning_rate": 8.609976883624377e-06, "loss": 0.1212, "step": 890 },
{ "epoch": 3.6136708860759494, "grad_norm": 0.3382291915177461, "learning_rate": 8.563367719785698e-06, "loss": 0.1183, "step": 891 },
{ "epoch": 3.6177215189873415, "grad_norm": 0.32103274909740526, "learning_rate": 8.516850665011138e-06, "loss": 0.1175, "step": 892 },
{ "epoch": 3.621772151898734, "grad_norm": 0.3153670803644698, "learning_rate": 8.47042609394269e-06, "loss": 0.1245, "step": 893 },
{ "epoch": 3.6258227848101265, "grad_norm": 0.31704083373767844, "learning_rate": 8.424094380477432e-06, "loss": 0.1274, "step": 894 },
{ "epoch": 3.629873417721519, "grad_norm": 0.32463465696160715, "learning_rate": 8.37785589776465e-06, "loss": 0.1246, "step": 895 },
{ "epoch": 3.6339240506329116, "grad_norm": 0.3334257635206265, "learning_rate": 8.331711018202694e-06, "loss": 0.1167, "step": 896 },
{ "epoch": 3.6379746835443036, "grad_norm": 0.32430187505161, "learning_rate": 8.285660113436104e-06, "loss": 0.1236, "step": 897 },
{ "epoch": 3.642025316455696, "grad_norm": 0.3316979737483432, "learning_rate": 8.239703554352527e-06, "loss": 0.13, "step": 898 },
{ "epoch": 3.6460759493670887, "grad_norm": 0.31992233510284124, "learning_rate": 8.193841711079775e-06, "loss": 0.123, "step": 899 },
{ "epoch": 3.6501265822784807, "grad_norm": 0.32151546635602796, "learning_rate": 8.148074952982828e-06, "loss": 0.1202, "step": 900 },
{ "epoch": 3.6541772151898733, "grad_norm": 0.3199911623434018, "learning_rate": 8.102403648660859e-06, "loss": 0.1213, "step": 901 },
{ "epoch": 3.6582278481012658, "grad_norm": 0.30654357164095763, "learning_rate": 8.056828165944282e-06, "loss": 0.1139, "step": 902 },
{ "epoch": 3.6622784810126583, "grad_norm": 0.33725617427129473, "learning_rate": 8.011348871891762e-06, "loss": 0.1263, "step": 903 },
{ "epoch": 3.666329113924051, "grad_norm": 0.3227132324047214, "learning_rate": 7.965966132787287e-06, "loss": 0.1182, "step": 904 },
{ "epoch": 3.670379746835443, "grad_norm": 0.33211495692138693, "learning_rate": 7.920680314137189e-06, "loss": 0.1348, "step": 905 },
{ "epoch": 3.6744303797468354, "grad_norm": 0.3335673602824367, "learning_rate": 7.875491780667246e-06, "loss": 0.1347, "step": 906 },
{ "epoch": 3.678481012658228, "grad_norm": 0.3095694414814613, "learning_rate": 7.830400896319667e-06, "loss": 0.1173, "step": 907 },
{ "epoch": 3.68253164556962, "grad_norm": 0.3304781587384214, "learning_rate": 7.785408024250259e-06, "loss": 0.1262, "step": 908 },
{ "epoch": 3.6865822784810125, "grad_norm": 0.3141718441293744, "learning_rate": 7.74051352682542e-06, "loss": 0.0984, "step": 909 },
{ "epoch": 3.690632911392405, "grad_norm": 0.3487473074598513, "learning_rate": 7.695717765619257e-06, "loss": 0.1249, "step": 910 },
{ "epoch": 3.6946835443037975, "grad_norm": 0.3109681415601883, "learning_rate": 7.651021101410673e-06, "loss": 0.1275, "step": 911 },
{ "epoch": 3.69873417721519, "grad_norm": 0.31483703031969046, "learning_rate": 7.606423894180464e-06, "loss": 0.1202, "step": 912 },
{ "epoch": 3.702784810126582, "grad_norm": 0.3337938742575738, "learning_rate": 7.56192650310839e-06, "loss": 0.1162, "step": 913 },
{ "epoch": 3.7068354430379746, "grad_norm": 0.3232153139461174, "learning_rate": 7.517529286570349e-06, "loss": 0.1209, "step": 914 },
{ "epoch": 3.710886075949367, "grad_norm": 0.32261403311146064, "learning_rate": 7.473232602135387e-06, "loss": 0.1241, "step": 915 },
{ "epoch": 3.714936708860759, "grad_norm": 0.3385704413354954, "learning_rate": 7.429036806562935e-06, "loss": 0.117, "step": 916 },
{ "epoch": 3.7189873417721517, "grad_norm": 0.31646125994246005, "learning_rate": 7.3849422557998455e-06, "loss": 0.1257, "step": 917 },
{ "epoch": 3.7230379746835442, "grad_norm": 0.3345311299474117, "learning_rate": 7.340949304977567e-06, "loss": 0.1237, "step": 918 },
{ "epoch": 3.7270886075949368, "grad_norm": 0.3094288863988066, "learning_rate": 7.297058308409282e-06, "loss": 0.1211, "step": 919 },
{ "epoch": 3.7311392405063293, "grad_norm": 0.3213683135032255, "learning_rate": 7.25326961958704e-06, "loss": 0.1243, "step": 920 },
{ "epoch": 3.7351898734177214, "grad_norm": 0.30907696418446123, "learning_rate": 7.209583591178921e-06, "loss": 0.1105, "step": 921 },
{ "epoch": 3.739240506329114, "grad_norm": 0.3077955863091849, "learning_rate": 7.1660005750261925e-06, "loss": 0.1436, "step": 922 },
{ "epoch": 3.7432911392405064, "grad_norm": 0.3073389398346609, "learning_rate": 7.1225209221404765e-06, "loss": 0.1166, "step": 923 },
{ "epoch": 3.747341772151899, "grad_norm": 0.3083081825342774, "learning_rate": 7.079144982700909e-06, "loss": 0.1159, "step": 924 },
{ "epoch": 3.7513924050632914, "grad_norm": 0.329741416831244, "learning_rate": 7.0358731060513695e-06, "loss": 0.1197, "step": 925 },
{ "epoch": 3.7554430379746835, "grad_norm": 0.31248098982422673, "learning_rate": 6.99270564069757e-06, "loss": 0.124, "step": 926 },
{ "epoch": 3.759493670886076, "grad_norm": 0.30440580774084036, "learning_rate": 6.949642934304375e-06, "loss": 0.1148, "step": 927 },
{ "epoch": 3.7635443037974685, "grad_norm": 0.3064458547744251, "learning_rate": 6.906685333692871e-06, "loss": 0.1286, "step": 928 },
{ "epoch": 3.7675949367088606, "grad_norm": 0.3105872946114702, "learning_rate": 6.86383318483769e-06, "loss": 0.1268, "step": 929 },
{ "epoch": 3.771645569620253, "grad_norm": 0.32794780893626496, "learning_rate": 6.821086832864139e-06, "loss": 0.1139, "step": 930 },
{ "epoch": 3.7756962025316456, "grad_norm": 0.30538269973553456, "learning_rate": 6.77844662204546e-06, "loss": 0.1228, "step": 931 },
{ "epoch": 3.779746835443038, "grad_norm": 0.3154855017953215, "learning_rate": 6.7359128958000455e-06, "loss": 0.118, "step": 932 },
{ "epoch": 3.7837974683544306, "grad_norm": 0.3116672907326428, "learning_rate": 6.693485996688695e-06, "loss": 0.1365, "step": 933 },
{ "epoch": 3.7878481012658227, "grad_norm": 0.30799121400425367, "learning_rate": 6.651166266411801e-06, "loss": 0.1202, "step": 934 },
{ "epoch": 3.7918987341772152, "grad_norm": 0.3112981760632191, "learning_rate": 6.6089540458066725e-06, "loss": 0.1307, "step": 935 },
{ "epoch": 3.7959493670886078, "grad_norm": 0.3288261904108576, "learning_rate": 6.566849674844711e-06, "loss": 0.1322, "step": 936 },
{ "epoch": 3.8, "grad_norm": 0.31253227663991584, "learning_rate": 6.524853492628747e-06, "loss": 0.1284, "step": 937 },
{ "epoch": 3.8040506329113923, "grad_norm": 0.3266196930765685, "learning_rate": 6.4829658373902536e-06, "loss": 0.1347, "step": 938 },
{ "epoch": 3.808101265822785, "grad_norm": 0.32079716259325025, "learning_rate": 6.441187046486648e-06, "loss": 0.1495, "step": 939 },
{ "epoch": 3.8121518987341774, "grad_norm": 0.3106137957070218, "learning_rate": 6.399517456398567e-06, "loss": 0.1131, "step": 940 },
{ "epoch": 3.81620253164557, "grad_norm": 0.3124417286474685, "learning_rate": 6.357957402727164e-06, "loss": 0.1262, "step": 941 },
{ "epoch": 3.820253164556962, "grad_norm": 0.3171163399475635, "learning_rate": 6.316507220191395e-06, "loss": 0.1095, "step": 942 },
{ "epoch": 3.8243037974683545, "grad_norm": 0.3132870054314758, "learning_rate": 6.275167242625331e-06, "loss": 0.1264, "step": 943 },
{ "epoch": 3.828354430379747, "grad_norm": 0.3107434347873801, "learning_rate": 6.233937802975471e-06, "loss": 0.1144, "step": 944 },
{ "epoch": 3.832405063291139, "grad_norm": 0.3059059286380862, "learning_rate": 6.192819233298046e-06, "loss": 0.1314, "step": 945 },
{ "epoch": 3.8364556962025316, "grad_norm": 0.3014714206961812, "learning_rate": 6.151811864756383e-06, "loss": 0.1138, "step": 946 },
{ "epoch": 3.840506329113924, "grad_norm": 0.30025004971888786, "learning_rate": 6.1109160276181655e-06, "loss": 0.1258, "step": 947 },
{ "epoch": 3.8445569620253166, "grad_norm": 0.29718582380046005, "learning_rate": 6.070132051252868e-06, "loss": 0.1089, "step": 948 },
{ "epoch": 3.848607594936709, "grad_norm": 0.3016634281217258, "learning_rate": 6.0294602641290034e-06, "loss": 0.1132, "step": 949 },
{ "epoch": 3.852658227848101, "grad_norm": 0.3210905891236295, "learning_rate": 5.988900993811575e-06, "loss": 0.1245, "step": 950 },
{ "epoch": 3.8567088607594937, "grad_norm": 0.3035383911919104, "learning_rate": 5.948454566959363e-06, "loss": 0.1193, "step": 951 },
{ "epoch": 3.8607594936708862, "grad_norm": 0.29730608222148697, "learning_rate": 5.908121309322328e-06, "loss": 0.1225, "step": 952 },
{ "epoch": 3.8648101265822783, "grad_norm": 0.31319889058117334, "learning_rate": 5.867901545738976e-06, "loss": 0.1286, "step": 953 },
{ "epoch": 3.868860759493671, "grad_norm": 0.29568152945212156, "learning_rate": 5.827795600133774e-06, "loss": 0.1276, "step": 954 },
{ "epoch": 3.8729113924050633, "grad_norm": 0.30818836294510654, "learning_rate": 5.787803795514466e-06, "loss": 0.1223, "step": 955 },
{ "epoch": 3.876962025316456, "grad_norm": 0.2909392251363715, "learning_rate": 5.747926453969576e-06, "loss": 0.1178, "step": 956 },
{ "epoch": 3.8810126582278484, "grad_norm": 0.31612076901908576, "learning_rate": 5.708163896665708e-06, "loss": 0.1195, "step": 957 },
{ "epoch": 3.8850632911392404, "grad_norm": 0.3098180957201334, "learning_rate": 5.668516443845047e-06, "loss": 0.1286, "step": 958 },
{ "epoch": 3.889113924050633, "grad_norm": 0.3072362269425784, "learning_rate": 5.6289844148227225e-06, "loss": 0.1203, "step": 959 },
{ "epoch": 3.8931645569620255, "grad_norm": 0.3160621349779037, "learning_rate": 5.5895681279842615e-06, "loss": 0.1246, "step": 960 },
{ "epoch": 3.8972151898734175, "grad_norm": 0.3243707406147554, "learning_rate": 5.550267900783019e-06, "loss": 0.1156, "step": 961 },
{ "epoch": 3.90126582278481, "grad_norm": 0.3015747815592493, "learning_rate": 5.511084049737623e-06, "loss": 0.121, "step": 962 },
{ "epoch": 3.9053164556962026, "grad_norm": 0.3110670150892755, "learning_rate": 5.4720168904294215e-06, "loss": 0.1141, "step": 963 },
{ "epoch": 3.909367088607595, "grad_norm": 0.31249331750833387, "learning_rate": 5.433066737499948e-06, "loss": 0.1193, "step": 964 },
{ "epoch": 3.9134177215189876, "grad_norm": 0.32564815047683293, "learning_rate": 5.394233904648376e-06, "loss": 0.1335, "step": 965 },
{ "epoch": 3.9174683544303797, "grad_norm": 0.3017859622866368, "learning_rate": 5.355518704628997e-06, "loss": 0.1323, "step": 966 },
{ "epoch": 3.921518987341772, "grad_norm": 0.30719299368626357, "learning_rate": 5.316921449248731e-06, "loss": 0.1169, "step": 967 },
{ "epoch": 3.9255696202531647, "grad_norm": 0.3022112507830384, "learning_rate": 5.278442449364538e-06, "loss": 0.1237, "step": 968 },
{ "epoch": 3.9296202531645568, "grad_norm": 0.32587355928647954, "learning_rate": 5.240082014881016e-06, "loss": 0.1093, "step": 969 },
{ "epoch": 3.9336708860759493, "grad_norm": 0.3087517009674416, "learning_rate": 5.201840454747822e-06, "loss": 0.1224, "step": 970 },
{ "epoch": 3.937721518987342, "grad_norm": 0.2942235566935442, "learning_rate": 5.163718076957223e-06, "loss": 0.1173, "step": 971 },
{ "epoch": 3.9417721518987343, "grad_norm": 0.30686504395195363, "learning_rate": 5.125715188541609e-06, "loss": 0.1164, "step": 972 },
{ "epoch": 3.945822784810127, "grad_norm": 0.29592570206728813, "learning_rate": 5.087832095571021e-06, "loss": 0.1168, "step": 973 },
{ "epoch": 3.949873417721519, "grad_norm": 0.2952796554380247, "learning_rate": 5.0500691031506766e-06, "loss": 0.1273, "step": 974 },
{ "epoch": 3.9539240506329114, "grad_norm": 0.308042818228654, "learning_rate": 5.01242651541854e-06, "loss": 0.1249, "step": 975 },
{ "epoch": 3.957974683544304, "grad_norm": 0.31275901321070587, "learning_rate": 4.974904635542815e-06, "loss": 0.1178, "step": 976 },
{ "epoch": 3.962025316455696, "grad_norm": 0.31197231255322466, "learning_rate": 4.937503765719582e-06, "loss": 0.1112, "step": 977 },
{ "epoch": 3.9660759493670885, "grad_norm": 0.3152495038169951, "learning_rate": 4.900224207170299e-06, "loss": 0.1161, "step": 978 },
{ "epoch": 3.970126582278481, "grad_norm": 0.3134884531705615, "learning_rate": 4.8630662601394065e-06, "loss": 0.1352, "step": 979 },
{ "epoch": 3.9741772151898735, "grad_norm": 0.3153291202745328, "learning_rate": 4.8260302238918995e-06, "loss": 0.1073, "step": 980 },
{ "epoch": 3.978227848101266, "grad_norm": 0.3086506812632402, "learning_rate": 4.789116396710924e-06, "loss": 0.1201, "step": 981 },
{ "epoch": 3.982278481012658, "grad_norm": 0.3103529635307643, "learning_rate": 4.752325075895368e-06, "loss": 0.1168, "step": 982 },
{ "epoch": 3.9863291139240506, "grad_norm": 0.31559875008913396, "learning_rate": 4.715656557757473e-06, "loss": 0.1146, "step": 983 },
{ "epoch": 3.990379746835443, "grad_norm": 0.3031496229955999, "learning_rate": 4.679111137620442e-06, "loss": 0.1214, "step": 984 },
{ "epoch": 3.9944303797468352, "grad_norm": 0.3202500023042191, "learning_rate": 4.6426891098160585e-06, "loss": 0.1192, "step": 985 },
{ "epoch": 3.9984810126582278, "grad_norm": 0.2939468808863193, "learning_rate": 4.6063907676823474e-06, "loss": 0.1126, "step": 986 },
{ "epoch": 4.004050632911392, "grad_norm": 0.3855780565317895, "learning_rate": 4.570216403561141e-06, "loss": 0.0734, "step": 987 },
{ "epoch": 4.008101265822785, "grad_norm": 0.32596973574541377, "learning_rate": 4.534166308795815e-06, "loss": 0.0778, "step": 988 },
{ "epoch": 4.012151898734177, "grad_norm": 0.28148275470241824, "learning_rate": 4.498240773728859e-06, "loss": 0.084, "step": 989 },
{ "epoch": 4.01620253164557, "grad_norm": 0.2600389321736882, "learning_rate": 4.462440087699609e-06, "loss": 0.0934, "step": 990 },
{ "epoch": 4.020253164556962, "grad_norm": 0.23665642840681858, "learning_rate": 4.426764539041861e-06, "loss": 0.0671, "step": 991 },
{ "epoch": 4.024303797468354, "grad_norm": 0.2744768936268161, "learning_rate": 4.391214415081582e-06, "loss": 0.0627, "step": 992 },
{ "epoch": 4.028354430379747, "grad_norm": 0.32513650319753323, "learning_rate": 4.355790002134579e-06, "loss": 0.0757, "step": 993 },
{ "epoch": 4.032405063291139, "grad_norm": 0.35828175104971627, "learning_rate": 4.320491585504207e-06, "loss": 0.0776, "step": 994 },
{ "epoch": 4.036455696202531, "grad_norm": 0.32845979878630405, "learning_rate": 4.2853194494790615e-06, "loss": 0.0715, "step": 995 },
{ "epoch": 4.040506329113924, "grad_norm": 0.31177769468366234, "learning_rate": 4.250273877330691e-06, "loss": 0.0819, "step": 996 },
{ "epoch": 4.044556962025316, "grad_norm": 0.283483981087915, "learning_rate": 4.215355151311313e-06, "loss": 0.0712, "step": 997 },
{ "epoch": 4.048607594936709, "grad_norm": 0.2616154450811069, "learning_rate": 4.180563552651542e-06, "loss": 0.0774, "step": 998 },
{ "epoch": 4.052658227848101, "grad_norm": 0.25708560725222573, "learning_rate": 4.145899361558147e-06, "loss": 0.0775, "step": 999 },
{ "epoch": 4.056708860759493, "grad_norm": 0.2558748252190518, "learning_rate": 4.111362857211738e-06, "loss": 0.078, "step": 1000 },
{ "epoch": 4.060759493670886, "grad_norm": 0.27399432063820217, "learning_rate": 4.076954317764592e-06, "loss": 0.073, "step": 1001 },
{ "epoch": 4.0648101265822785, "grad_norm": 0.29130613927831617, "learning_rate": 4.042674020338335e-06, "loss": 0.0807, "step": 1002 },
{ "epoch": 4.0688607594936705, "grad_norm": 0.2837187282558894, "learning_rate": 4.0085222410217835e-06, "loss": 0.0885, "step": 1003 },
{ "epoch": 4.0729113924050635, "grad_norm": 0.261585242067623, "learning_rate": 3.974499254868674e-06, "loss": 0.086, "step": 1004 },
{ "epoch": 4.076962025316456, "grad_norm": 0.25197518677511843, "learning_rate": 3.940605335895451e-06, "loss": 0.0775, "step": 1005 },
{ "epoch": 4.0810126582278485, "grad_norm": 0.26046723354995144, "learning_rate": 3.90684075707908e-06, "loss": 0.0772, "step": 1006 },
{ "epoch": 4.085063291139241, "grad_norm": 0.27053933620149667, "learning_rate": 3.8732057903548505e-06, "loss": 0.0703, "step": 1007 },
{ "epoch": 4.089113924050633, "grad_norm": 0.25924782979106414, "learning_rate": 3.8397007066141375e-06, "loss": 0.0716, "step": 1008 },
{ "epoch": 4.093164556962026, "grad_norm": 0.25529207372276197, "learning_rate": 3.806325775702304e-06, "loss": 0.0745, "step": 1009 },
{ "epoch": 4.097215189873418, "grad_norm": 0.2632416241187742, "learning_rate": 3.773081266416434e-06, "loss": 0.0855, "step": 1010 },
{ "epoch": 4.10126582278481, "grad_norm": 0.2821795201441978, "learning_rate": 3.739967446503245e-06, "loss": 0.0788, "step": 1011 },
{ "epoch": 4.105316455696203, "grad_norm": 0.25966781643280884, "learning_rate": 3.706984582656894e-06, "loss": 0.0821, "step": 1012 },
{ "epoch": 4.109367088607595, "grad_norm": 0.2621406358175322, "learning_rate": 3.6741329405168237e-06, "loss": 0.0751, "step": 1013 },
{ "epoch": 4.113417721518988, "grad_norm": 0.26467964728769455, "learning_rate": 3.641412784665648e-06, "loss": 0.0664, "step": 1014 },
{ "epoch": 4.11746835443038, "grad_norm": 0.24342782783189904, "learning_rate": 3.608824378627005e-06, "loss": 0.0769, "step": 1015 },
{ "epoch": 4.121518987341772, "grad_norm": 0.248469473225614, "learning_rate": 3.5763679848634337e-06, "loss": 0.0711, "step": 1016 },
{ "epoch": 4.125569620253165, "grad_norm": 0.25998039195897366, "learning_rate": 3.544043864774269e-06, "loss": 0.0737, "step": 1017 },
{ "epoch": 4.129620253164557, "grad_norm": 0.25772734492858035, "learning_rate": 3.5118522786935282e-06, "loss": 0.0928, "step": 1018 },
{ "epoch": 4.133670886075949, "grad_norm": 0.27656475147441606, "learning_rate": 3.479793485887819e-06, "loss": 0.0751, "step": 1019 },
{ "epoch": 4.137721518987342, "grad_norm": 0.2475774160642052, "learning_rate": 3.4478677445542653e-06, "loss": 0.0694, "step": 1020 },
{ "epoch": 4.141772151898734, "grad_norm": 0.24331644093959373, "learning_rate": 3.4160753118183767e-06, "loss": 0.0659, "step": 1021 },
{ "epoch": 4.145822784810127, "grad_norm": 0.24701226152330016, "learning_rate": 3.3844164437320527e-06, "loss": 0.0803, "step": 1022 },
{ "epoch": 4.149873417721519, "grad_norm": 0.26151294974937445, "learning_rate": 3.3528913952714558e-06, "loss": 0.0793, "step": 1023 },
{ "epoch": 4.153924050632911, "grad_norm": 0.3403337864097617, "learning_rate": 3.321500420335e-06, "loss": 0.0736, "step": 1024 },
{ "epoch": 4.157974683544304, "grad_norm": 0.25930768509727903, "learning_rate": 3.290243771741275e-06, "loss": 0.0871, "step": 1025 },
{ "epoch": 4.162025316455696, "grad_norm": 0.26042630805331113, "learning_rate": 3.2591217012270325e-06, "loss": 0.0842, "step": 1026 },
{ "epoch": 4.166075949367088, "grad_norm": 0.24180236820418133, "learning_rate": 3.228134459445149e-06, "loss": 0.0789, "step": 1027 },
{ "epoch": 4.170126582278481, "grad_norm": 0.2617145025916907, "learning_rate": 3.1972822959626205e-06, "loss": 0.0866, "step": 1028 },
{ "epoch": 4.174177215189873, "grad_norm": 0.25743683361489017, "learning_rate": 3.166565459258513e-06, "loss": 0.078, "step": 1029 },
{ "epoch": 4.178227848101266, "grad_norm": 0.25212910880858, "learning_rate": 3.1359841967220193e-06, "loss": 0.0883, "step": 1030 },
{ "epoch": 4.182278481012658, "grad_norm": 0.250136211470481, "learning_rate": 3.105538754650419e-06, "loss": 0.0802, "step": 1031 },
{ "epoch": 4.18632911392405, "grad_norm": 0.24007100221542632, "learning_rate": 3.07522937824712e-06, "loss": 0.0681, "step": 1032 },
{ "epoch": 4.190379746835443, "grad_norm": 0.24201117781003276, "learning_rate": 3.0450563116196697e-06, "loss": 0.0638, "step": 1033 },
{ "epoch": 4.194430379746835, "grad_norm": 0.25647514464276866, "learning_rate": 3.0150197977778008e-06, "loss": 0.0779, "step": 1034 },
{ "epoch": 4.1984810126582275, "grad_norm": 0.24877823998687126, "learning_rate": 2.985120078631465e-06, "loss": 0.0834, "step": 1035 },
{ "epoch": 4.2025316455696204, "grad_norm": 0.2401799721580179, "learning_rate": 2.9553573949888893e-06, "loss": 0.0787, "step": 1036 },
{ "epoch": 4.2065822784810125, "grad_norm": 0.24271964561839826, "learning_rate": 2.9257319865546384e-06, "loss": 0.0629, "step": 1037 },
{ "epoch": 4.2106329113924055, "grad_norm": 0.26582074934096467, "learning_rate": 2.896244091927678e-06, "loss": 0.0668, "step": 1038 },
{ "epoch": 4.2146835443037975, "grad_norm": 0.25475649600032485, "learning_rate": 2.8668939485994584e-06, "loss": 0.0769, "step": 1039 },
{ "epoch": 4.21873417721519, "grad_norm": 0.2551999752264489, "learning_rate": 2.837681792951994e-06, "loss": 0.0753, "step": 1040 },
{ "epoch": 4.222784810126583, "grad_norm": 0.2528662896461291, "learning_rate": 2.808607860255981e-06, "loss": 0.0666, "step": 1041 },
{ "epoch": 4.226835443037975, "grad_norm": 0.2566839482067846, "learning_rate": 2.7796723846688634e-06, "loss": 0.0731, "step": 1042 },
{ "epoch": 4.230886075949367, "grad_norm": 0.25358456328942813, "learning_rate": 2.7508755992329937e-06, "loss": 0.0975, "step": 1043 },
{ "epoch": 4.23493670886076, "grad_norm": 0.2615932504605827, "learning_rate": 2.722217735873718e-06, "loss": 0.0807, "step": 1044 },
{ "epoch": 4.238987341772152, "grad_norm": 0.24418860110686075, "learning_rate": 2.6936990253975315e-06, "loss": 0.0888, "step": 1045 },
{ "epoch": 4.243037974683545, "grad_norm": 0.25718812081888787, "learning_rate": 2.665319697490205e-06, "loss": 0.0709, "step": 1046 },
{ "epoch": 4.247088607594937, "grad_norm": 0.25281748171982277, "learning_rate": 2.637079980714945e-06, "loss": 0.0832, "step": 1047 },
{ "epoch": 4.251139240506329, "grad_norm": 0.2446378792769266, "learning_rate": 2.6089801025105453e-06, "loss": 0.0674, "step": 1048 },
{ "epoch": 4.255189873417722, "grad_norm": 0.2528952903999253, "learning_rate": 2.581020289189571e-06, "loss": 0.0726, "step": 1049 },
{ "epoch": 4.259240506329114, "grad_norm": 0.2515861643020543, "learning_rate": 2.553200765936501e-06, "loss": 0.0719, "step": 1050 },
{ "epoch": 4.263291139240506, "grad_norm": 0.2464272035555728, "learning_rate": 2.525521756805962e-06, "loss": 0.0747, "step": 1051 },
{ "epoch": 4.267341772151899, "grad_norm": 0.24087746955095168, "learning_rate": 2.497983484720885e-06, "loss": 0.0679, "step": 1052 },
{ "epoch": 4.271392405063291, "grad_norm": 0.2482871282318326, "learning_rate": 2.470586171470728e-06, "loss": 0.0744, "step": 1053 },
{ "epoch": 4.275443037974684, "grad_norm": 0.25939992750346985, "learning_rate": 2.4433300377096836e-06, "loss": 0.0777, "step": 1054 },
{ "epoch": 4.279493670886076, "grad_norm": 0.2534962493494359, "learning_rate": 2.4162153029549073e-06, "loss": 0.0893, "step": 1055 },
{ "epoch": 4.283544303797468, "grad_norm": 0.2645444821042828, "learning_rate": 2.3892421855847458e-06, "loss": 0.0819, "step": 1056 },
{ "epoch": 4.287594936708861, "grad_norm": 0.2432455654550311, "learning_rate": 2.362410902836978e-06, "loss": 0.0756, "step": 1057 },
{ "epoch": 4.291645569620253, "grad_norm": 0.24955918609341013, "learning_rate": 2.3357216708070653e-06, "loss": 0.0725, "step": 1058 },
{ "epoch": 4.295696202531645, "grad_norm": 0.24694534442559327, "learning_rate": 2.309174704446411e-06, "loss": 0.0741, "step": 1059 },
{ "epoch": 4.299746835443038, "grad_norm": 0.2483491311879889, "learning_rate": 2.2827702175606437e-06, "loss": 0.0775, "step": 1060 },
{ "epoch": 4.30379746835443, "grad_norm": 0.2530013710793077, "learning_rate": 2.256508422807855e-06, "loss": 0.0721, "step": 1061 },
{ "epoch": 4.307848101265823, "grad_norm": 0.29954727063559183, "learning_rate": 2.230389531696946e-06, "loss": 0.0787, "step": 1062 },
{ "epoch": 4.311898734177215, "grad_norm": 0.25014844227856137, "learning_rate": 2.204413754585857e-06, "loss": 0.0791, "step": 1063 },
{ "epoch": 4.315949367088607, "grad_norm": 0.2585676410930106, "learning_rate": 2.1785813006799406e-06, "loss": 0.0789, "step": 1064 },
{ "epoch": 4.32, "grad_norm": 0.24642805855029867, "learning_rate": 2.1528923780302224e-06, "loss": 0.0721, "step": 1065 },
{ "epoch": 4.324050632911392, "grad_norm": 0.22918127050955864, "learning_rate": 2.127347193531757e-06, "loss": 0.0728, "step": 1066 },
{ "epoch": 4.328101265822784, "grad_norm": 0.25716162199950815, "learning_rate": 2.101945952921942e-06, "loss": 0.0742, "step": 1067 },
{ "epoch": 4.332151898734177,
"grad_norm": 0.2737055101775538, |
|
"learning_rate": 2.0766888607788906e-06, |
|
"loss": 0.0802, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 4.3362025316455695, |
|
"grad_norm": 0.2491845662046111, |
|
"learning_rate": 2.0515761205197337e-06, |
|
"loss": 0.0766, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 4.340253164556962, |
|
"grad_norm": 0.247056276070561, |
|
"learning_rate": 2.0266079343990453e-06, |
|
"loss": 0.0705, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 4.3443037974683545, |
|
"grad_norm": 0.2620400282621168, |
|
"learning_rate": 2.0017845035071494e-06, |
|
"loss": 0.0802, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 4.348354430379747, |
|
"grad_norm": 0.2450637823039621, |
|
"learning_rate": 1.9771060277685537e-06, |
|
"loss": 0.0822, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 4.3524050632911395, |
|
"grad_norm": 0.2742591644822662, |
|
"learning_rate": 1.95257270594031e-06, |
|
"loss": 0.0906, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 4.356455696202532, |
|
"grad_norm": 0.253946343397036, |
|
"learning_rate": 1.9281847356104188e-06, |
|
"loss": 0.086, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 4.360506329113924, |
|
"grad_norm": 0.24727750475206697, |
|
"learning_rate": 1.9039423131962365e-06, |
|
"loss": 0.0796, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 4.364556962025317, |
|
"grad_norm": 0.26655910041948777, |
|
"learning_rate": 1.8798456339429027e-06, |
|
"loss": 0.0803, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 4.368607594936709, |
|
"grad_norm": 0.2580519837978843, |
|
"learning_rate": 1.8558948919217612e-06, |
|
"loss": 0.0737, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 4.372658227848102, |
|
"grad_norm": 0.24961940286798082, |
|
"learning_rate": 1.8320902800287954e-06, |
|
"loss": 0.0787, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 4.376708860759494, |
|
"grad_norm": 0.26132922312026663, |
|
"learning_rate": 1.8084319899830726e-06, |
|
"loss": 0.0727, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 4.380759493670886, |
|
"grad_norm": 0.2412561548629328, |
|
"learning_rate": 1.7849202123252097e-06, |
|
"loss": 0.0814, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 4.384810126582279, |
|
"grad_norm": 0.2509077159249742, |
|
"learning_rate": 1.7615551364158401e-06, |
|
"loss": 0.0845, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 4.388860759493671, |
|
"grad_norm": 0.2498209712981625, |
|
"learning_rate": 1.738336950434061e-06, |
|
"loss": 0.074, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 4.392911392405063, |
|
"grad_norm": 0.24934314438911812, |
|
"learning_rate": 1.715265841375957e-06, |
|
"loss": 0.0712, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 4.396962025316456, |
|
"grad_norm": 0.24209788919440325, |
|
"learning_rate": 1.6923419950530684e-06, |
|
"loss": 0.078, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 4.401012658227848, |
|
"grad_norm": 0.2407660103293905, |
|
"learning_rate": 1.6695655960909008e-06, |
|
"loss": 0.0792, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 4.405063291139241, |
|
"grad_norm": 0.2716957548351952, |
|
"learning_rate": 1.646936827927441e-06, |
|
"loss": 0.0861, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 4.409113924050633, |
|
"grad_norm": 0.26447111161503273, |
|
"learning_rate": 1.6244558728116766e-06, |
|
"loss": 0.0668, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 4.413164556962025, |
|
"grad_norm": 0.2553937845664773, |
|
"learning_rate": 1.6021229118021265e-06, |
|
"loss": 0.0789, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 4.417215189873418, |
|
"grad_norm": 0.252130673687367, |
|
"learning_rate": 1.5799381247653967e-06, |
|
"loss": 0.0645, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 4.42126582278481, |
|
"grad_norm": 0.25223692386974356, |
|
"learning_rate": 1.5579016903747013e-06, |
|
"loss": 0.0787, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 4.425316455696202, |
|
"grad_norm": 0.2714650920803946, |
|
"learning_rate": 1.5360137861084656e-06, |
|
"loss": 0.074, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 4.429367088607595, |
|
"grad_norm": 0.24310816090845921, |
|
"learning_rate": 1.5142745882488475e-06, |
|
"loss": 0.0738, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 4.433417721518987, |
|
"grad_norm": 0.2658243820206398, |
|
"learning_rate": 1.4926842718803691e-06, |
|
"loss": 0.0783, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 4.43746835443038, |
|
"grad_norm": 0.2558271037580769, |
|
"learning_rate": 1.4712430108884657e-06, |
|
"loss": 0.0876, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 4.441518987341772, |
|
"grad_norm": 0.24617673671236945, |
|
"learning_rate": 1.4499509779581078e-06, |
|
"loss": 0.0725, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 4.445569620253164, |
|
"grad_norm": 0.25052171834376824, |
|
"learning_rate": 1.4288083445723988e-06, |
|
"loss": 0.0741, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 4.449620253164557, |
|
"grad_norm": 0.2414530964684062, |
|
"learning_rate": 1.4078152810112045e-06, |
|
"loss": 0.0821, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 4.453670886075949, |
|
"grad_norm": 0.2499203816982718, |
|
"learning_rate": 1.3869719563497697e-06, |
|
"loss": 0.0779, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 4.457721518987341, |
|
"grad_norm": 0.2491212265848542, |
|
"learning_rate": 1.3662785384573663e-06, |
|
"loss": 0.0675, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 4.461772151898734, |
|
"grad_norm": 0.24050607534402327, |
|
"learning_rate": 1.3457351939959383e-06, |
|
"loss": 0.0763, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 4.465822784810126, |
|
"grad_norm": 0.24951023394936006, |
|
"learning_rate": 1.3253420884187551e-06, |
|
"loss": 0.0746, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 4.469873417721519, |
|
"grad_norm": 0.25117934485060756, |
|
"learning_rate": 1.3050993859690953e-06, |
|
"loss": 0.0738, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 4.473924050632911, |
|
"grad_norm": 0.25608917777308643, |
|
"learning_rate": 1.2850072496788869e-06, |
|
"loss": 0.0698, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 4.4779746835443035, |
|
"grad_norm": 0.25328025155321565, |
|
"learning_rate": 1.2650658413674434e-06, |
|
"loss": 0.0723, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 4.4820253164556965, |
|
"grad_norm": 0.2515470523397378, |
|
"learning_rate": 1.2452753216401226e-06, |
|
"loss": 0.0796, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 4.4860759493670885, |
|
"grad_norm": 0.25465037633674925, |
|
"learning_rate": 1.2256358498870503e-06, |
|
"loss": 0.0648, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 4.490126582278481, |
|
"grad_norm": 0.248865967434056, |
|
"learning_rate": 1.2061475842818337e-06, |
|
"loss": 0.0875, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 4.494177215189874, |
|
"grad_norm": 0.2551791190151286, |
|
"learning_rate": 1.1868106817802816e-06, |
|
"loss": 0.072, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 4.498227848101266, |
|
"grad_norm": 0.244957009149918, |
|
"learning_rate": 1.1676252981191482e-06, |
|
"loss": 0.0779, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 4.502278481012659, |
|
"grad_norm": 0.2618684012862, |
|
"learning_rate": 1.1485915878148823e-06, |
|
"loss": 0.0704, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 4.506329113924051, |
|
"grad_norm": 0.2467239049557004, |
|
"learning_rate": 1.1297097041623584e-06, |
|
"loss": 0.0758, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 4.510379746835443, |
|
"grad_norm": 0.24889178649997643, |
|
"learning_rate": 1.1109797992336847e-06, |
|
"loss": 0.0661, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 4.514430379746836, |
|
"grad_norm": 0.2401255530147362, |
|
"learning_rate": 1.092402023876933e-06, |
|
"loss": 0.0596, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 4.518481012658228, |
|
"grad_norm": 0.24461867063661066, |
|
"learning_rate": 1.0739765277149527e-06, |
|
"loss": 0.0787, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 4.52253164556962, |
|
"grad_norm": 0.30785101315662, |
|
"learning_rate": 1.0557034591441596e-06, |
|
"loss": 0.0767, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 4.526582278481013, |
|
"grad_norm": 0.2651710743778875, |
|
"learning_rate": 1.0375829653333324e-06, |
|
"loss": 0.0746, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 4.530632911392405, |
|
"grad_norm": 0.2602286510321301, |
|
"learning_rate": 1.0196151922224385e-06, |
|
"loss": 0.0654, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 4.534683544303798, |
|
"grad_norm": 0.260745092731178, |
|
"learning_rate": 1.0018002845214526e-06, |
|
"loss": 0.0753, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 4.53873417721519, |
|
"grad_norm": 0.2594017056356243, |
|
"learning_rate": 9.841383857091947e-07, |
|
"loss": 0.062, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 4.542784810126582, |
|
"grad_norm": 0.26363373529485995, |
|
"learning_rate": 9.666296380321616e-07, |
|
"loss": 0.0838, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 4.546835443037975, |
|
"grad_norm": 0.25445685466377077, |
|
"learning_rate": 9.492741825034124e-07, |
|
"loss": 0.081, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 4.550886075949367, |
|
"grad_norm": 0.2516598776054274, |
|
"learning_rate": 9.320721589013892e-07, |
|
"loss": 0.0823, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 4.55493670886076, |
|
"grad_norm": 0.235673949140923, |
|
"learning_rate": 9.150237057688339e-07, |
|
"loss": 0.0728, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 4.558987341772152, |
|
"grad_norm": 0.25076086880904863, |
|
"learning_rate": 8.981289604116328e-07, |
|
"loss": 0.0819, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 4.563037974683544, |
|
"grad_norm": 0.24833273189985783, |
|
"learning_rate": 8.813880588977542e-07, |
|
"loss": 0.0657, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 4.567088607594937, |
|
"grad_norm": 0.2567444144556877, |
|
"learning_rate": 8.648011360561126e-07, |
|
"loss": 0.0623, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 4.571139240506329, |
|
"grad_norm": 0.24958571210999372, |
|
"learning_rate": 8.483683254755037e-07, |
|
"loss": 0.0794, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 4.575189873417721, |
|
"grad_norm": 0.23500380476510815, |
|
"learning_rate": 8.320897595035227e-07, |
|
"loss": 0.07, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 4.579240506329114, |
|
"grad_norm": 0.25546099879419387, |
|
"learning_rate": 8.159655692455093e-07, |
|
"loss": 0.0686, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 4.583291139240506, |
|
"grad_norm": 0.2596121528059344, |
|
"learning_rate": 7.999958845634648e-07, |
|
"loss": 0.0746, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 4.587341772151898, |
|
"grad_norm": 0.24702574174518246, |
|
"learning_rate": 7.841808340750478e-07, |
|
"loss": 0.0616, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 4.591392405063291, |
|
"grad_norm": 0.23929771306157555, |
|
"learning_rate": 7.685205451524869e-07, |
|
"loss": 0.0745, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 4.595443037974683, |
|
"grad_norm": 0.24615844646472543, |
|
"learning_rate": 7.530151439216027e-07, |
|
"loss": 0.0715, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 4.599493670886076, |
|
"grad_norm": 0.2599948823738649, |
|
"learning_rate": 7.376647552607675e-07, |
|
"loss": 0.0855, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 4.603544303797468, |
|
"grad_norm": 0.24277032117671493, |
|
"learning_rate": 7.224695027998963e-07, |
|
"loss": 0.0665, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 4.6075949367088604, |
|
"grad_norm": 0.24401815298659202, |
|
"learning_rate": 7.07429508919466e-07, |
|
"loss": 0.0904, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 4.611645569620253, |
|
"grad_norm": 0.24178121722666024, |
|
"learning_rate": 6.925448947495206e-07, |
|
"loss": 0.0697, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 4.6156962025316455, |
|
"grad_norm": 0.2571381378193255, |
|
"learning_rate": 6.778157801686936e-07, |
|
"loss": 0.0703, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 4.619746835443038, |
|
"grad_norm": 0.23662543508751135, |
|
"learning_rate": 6.632422838032515e-07, |
|
"loss": 0.068, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 4.6237974683544305, |
|
"grad_norm": 0.24242987591735304, |
|
"learning_rate": 6.488245230261281e-07, |
|
"loss": 0.0697, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 4.627848101265823, |
|
"grad_norm": 0.24965944803662174, |
|
"learning_rate": 6.345626139559868e-07, |
|
"loss": 0.0788, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 4.6318987341772155, |
|
"grad_norm": 0.2665620489742712, |
|
"learning_rate": 6.204566714562866e-07, |
|
"loss": 0.0743, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 4.635949367088608, |
|
"grad_norm": 0.25156629081659354, |
|
"learning_rate": 6.06506809134344e-07, |
|
"loss": 0.0887, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"grad_norm": 0.24953166877311111, |
|
"learning_rate": 5.927131393404373e-07, |
|
"loss": 0.0787, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 4.644050632911393, |
|
"grad_norm": 0.2442811136279155, |
|
"learning_rate": 5.790757731668817e-07, |
|
"loss": 0.0685, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 4.648101265822785, |
|
"grad_norm": 0.2556788423123862, |
|
"learning_rate": 5.655948204471507e-07, |
|
"loss": 0.0743, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 4.652151898734177, |
|
"grad_norm": 0.25452014070575435, |
|
"learning_rate": 5.522703897549875e-07, |
|
"loss": 0.0662, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 4.65620253164557, |
|
"grad_norm": 0.2511470123992533, |
|
"learning_rate": 5.391025884035239e-07, |
|
"loss": 0.0858, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 4.660253164556962, |
|
"grad_norm": 0.2586289094713939, |
|
"learning_rate": 5.260915224444207e-07, |
|
"loss": 0.0694, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 4.664303797468355, |
|
"grad_norm": 0.24180622805649338, |
|
"learning_rate": 5.132372966670129e-07, |
|
"loss": 0.0833, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 4.668354430379747, |
|
"grad_norm": 0.25697420343301, |
|
"learning_rate": 5.005400145974704e-07, |
|
"loss": 0.081, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 4.672405063291139, |
|
"grad_norm": 0.2548330561535777, |
|
"learning_rate": 4.879997784979562e-07, |
|
"loss": 0.0707, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 4.676455696202532, |
|
"grad_norm": 0.24405505466711394, |
|
"learning_rate": 4.7561668936580984e-07, |
|
"loss": 0.0826, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 4.680506329113924, |
|
"grad_norm": 0.24612494592617248, |
|
"learning_rate": 4.6339084693272306e-07, |
|
"loss": 0.0762, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 4.684556962025317, |
|
"grad_norm": 0.24825938188264493, |
|
"learning_rate": 4.5132234966395847e-07, |
|
"loss": 0.0613, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 4.688607594936709, |
|
"grad_norm": 0.24447678622388835, |
|
"learning_rate": 4.3941129475752795e-07, |
|
"loss": 0.0794, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 4.692658227848101, |
|
"grad_norm": 0.2530575754344129, |
|
"learning_rate": 4.27657778143431e-07, |
|
"loss": 0.0862, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 4.696708860759494, |
|
"grad_norm": 0.24745469281731725, |
|
"learning_rate": 4.1606189448287757e-07, |
|
"loss": 0.0732, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 4.700759493670886, |
|
"grad_norm": 0.2475296419897427, |
|
"learning_rate": 4.046237371675177e-07, |
|
"loss": 0.0717, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 4.704810126582278, |
|
"grad_norm": 0.24737444861723035, |
|
"learning_rate": 3.9334339831869963e-07, |
|
"loss": 0.0619, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 4.708860759493671, |
|
"grad_norm": 0.24028057184214424, |
|
"learning_rate": 3.8222096878671955e-07, |
|
"loss": 0.0773, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 4.712911392405063, |
|
"grad_norm": 0.2583192284184501, |
|
"learning_rate": 3.7125653815009545e-07, |
|
"loss": 0.0669, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 4.716962025316455, |
|
"grad_norm": 0.25356799168936894, |
|
"learning_rate": 3.6045019471484974e-07, |
|
"loss": 0.0579, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 4.721012658227848, |
|
"grad_norm": 0.2371246631938819, |
|
"learning_rate": 3.498020255137813e-07, |
|
"loss": 0.0694, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 4.72506329113924, |
|
"grad_norm": 0.2392894158729922, |
|
"learning_rate": 3.393121163057811e-07, |
|
"loss": 0.0772, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 4.729113924050633, |
|
"grad_norm": 0.23505203532971802, |
|
"learning_rate": 3.289805515751399e-07, |
|
"loss": 0.0729, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 4.733164556962025, |
|
"grad_norm": 0.27313742950197145, |
|
"learning_rate": 3.188074145308573e-07, |
|
"loss": 0.0753, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 4.737215189873417, |
|
"grad_norm": 0.24497639376227795, |
|
"learning_rate": 3.087927871059804e-07, |
|
"loss": 0.0811, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 4.74126582278481, |
|
"grad_norm": 0.24370014371504387, |
|
"learning_rate": 2.989367499569418e-07, |
|
"loss": 0.0666, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 4.745316455696202, |
|
"grad_norm": 0.2397882875336495, |
|
"learning_rate": 2.8923938246290917e-07, |
|
"loss": 0.0702, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 4.749367088607595, |
|
"grad_norm": 0.2461561345035232, |
|
"learning_rate": 2.7970076272514804e-07, |
|
"loss": 0.0678, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 4.7534177215189874, |
|
"grad_norm": 0.2399198746431998, |
|
"learning_rate": 2.703209675663887e-07, |
|
"loss": 0.0802, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 4.7574683544303795, |
|
"grad_norm": 0.24162744205377715, |
|
"learning_rate": 2.6110007253021374e-07, |
|
"loss": 0.0654, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 4.7615189873417725, |
|
"grad_norm": 0.2526390841524094, |
|
"learning_rate": 2.520381518804471e-07, |
|
"loss": 0.0756, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 4.7655696202531646, |
|
"grad_norm": 0.2637056062981526, |
|
"learning_rate": 2.4313527860054585e-07, |
|
"loss": 0.0751, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 4.769620253164557, |
|
"grad_norm": 0.2723612819020465, |
|
"learning_rate": 2.343915243930317e-07, |
|
"loss": 0.0712, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 4.77367088607595, |
|
"grad_norm": 0.25828410335125607, |
|
"learning_rate": 2.2580695967889367e-07, |
|
"loss": 0.0781, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 4.777721518987342, |
|
"grad_norm": 0.25024568093593436, |
|
"learning_rate": 2.1738165359704189e-07, |
|
"loss": 0.0858, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 4.781772151898734, |
|
"grad_norm": 0.23696344581501413, |
|
"learning_rate": 2.0911567400373257e-07, |
|
"loss": 0.0807, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 4.785822784810127, |
|
"grad_norm": 0.24601640086114546, |
|
"learning_rate": 2.0100908747202607e-07, |
|
"loss": 0.0765, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 4.789873417721519, |
|
"grad_norm": 0.23788764426597328, |
|
"learning_rate": 1.9306195929125638e-07, |
|
"loss": 0.0846, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 4.793924050632912, |
|
"grad_norm": 0.2417980998460383, |
|
"learning_rate": 1.8527435346650247e-07, |
|
"loss": 0.0814, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 4.797974683544304, |
|
"grad_norm": 0.25349583920455804, |
|
"learning_rate": 1.7764633271807108e-07, |
|
"loss": 0.0787, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 4.802025316455696, |
|
"grad_norm": 0.23286586351781316, |
|
"learning_rate": 1.7017795848099262e-07, |
|
"loss": 0.0718, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 4.806075949367089, |
|
"grad_norm": 0.24137589357668837, |
|
"learning_rate": 1.6286929090452596e-07, |
|
"loss": 0.0725, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 4.810126582278481, |
|
"grad_norm": 0.26140063504887595, |
|
"learning_rate": 1.557203888516745e-07, |
|
"loss": 0.0801, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 4.814177215189874, |
|
"grad_norm": 0.2352179156603715, |
|
"learning_rate": 1.487313098987131e-07, |
|
"loss": 0.0838, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 4.818227848101266, |
|
"grad_norm": 0.2564287501181243, |
|
"learning_rate": 1.4190211033472402e-07, |
|
"loss": 0.0732, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 4.822278481012658, |
|
"grad_norm": 0.23700981992590694, |
|
"learning_rate": 1.3523284516113955e-07, |
|
"loss": 0.0715, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 4.826329113924051, |
|
"grad_norm": 0.24171583088411183, |
|
"learning_rate": 1.2872356809130682e-07, |
|
"loss": 0.0627, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 4.830379746835443, |
|
"grad_norm": 0.24343639042603896, |
|
"learning_rate": 1.2237433155004807e-07, |
|
"loss": 0.0744, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 4.834430379746835, |
|
"grad_norm": 0.25114903895895957, |
|
"learning_rate": 1.1618518667323886e-07, |
|
"loss": 0.0692, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 4.838481012658228, |
|
"grad_norm": 0.2516048155726174, |
|
"learning_rate": 1.1015618330740385e-07, |
|
"loss": 0.0796, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 4.84253164556962, |
|
"grad_norm": 0.25575173265657575, |
|
"learning_rate": 1.042873700093061e-07, |
|
"loss": 0.0627, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 4.846582278481012, |
|
"grad_norm": 0.24558036555707446, |
|
"learning_rate": 9.857879404556291e-08, |
|
"loss": 0.0784, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 4.850632911392405, |
|
"grad_norm": 0.2313221638953138, |
|
"learning_rate": 9.303050139225722e-08, |
|
"loss": 0.0787, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 4.854683544303797, |
|
"grad_norm": 0.24479027703583175, |
|
"learning_rate": 8.76425367345779e-08, |
|
"loss": 0.08, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 4.85873417721519, |
|
"grad_norm": 0.23737782131874363, |
|
"learning_rate": 8.241494346644897e-08, |
|
"loss": 0.061, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 4.862784810126582, |
|
"grad_norm": 0.23895146579088752, |
|
"learning_rate": 7.734776369019204e-08, |
|
"loss": 0.0635, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 4.866835443037974, |
|
"grad_norm": 0.24013263982404476, |
|
"learning_rate": 7.244103821617332e-08, |
|
"loss": 0.0736, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 4.870886075949367, |
|
"grad_norm": 0.26624486625731814, |
|
"learning_rate": 6.769480656248606e-08, |
|
"loss": 0.0733, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 4.874936708860759, |
|
"grad_norm": 0.2575163141808329, |
|
"learning_rate": 6.310910695462635e-08, |
|
"loss": 0.0727, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 4.878987341772152, |
|
"grad_norm": 0.2400975742349225, |
|
"learning_rate": 5.8683976325191185e-08, |
|
"loss": 0.0756, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 4.883037974683544, |
|
"grad_norm": 0.25990441029720845, |
|
"learning_rate": 5.4419450313571984e-08, |
|
"loss": 0.0871, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 4.8870886075949365, |
|
"grad_norm": 0.23536163052395423, |
|
"learning_rate": 5.031556326567488e-08, |
|
"loss": 0.0757, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 4.891139240506329, |
|
"grad_norm": 0.24849644628075399, |
|
"learning_rate": 4.637234823364312e-08, |
|
"loss": 0.0694, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 4.8951898734177215, |
|
"grad_norm": 0.24564660848109632, |
|
"learning_rate": 4.258983697558838e-08, |
|
"loss": 0.0711, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 4.899240506329114, |
|
"grad_norm": 0.251602384408858, |
|
"learning_rate": 3.896805995533548e-08, |
|
"loss": 0.069, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 4.9032911392405065, |
|
"grad_norm": 0.2480295142437249, |
|
"learning_rate": 3.550704634218028e-08, |
|
"loss": 0.077, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 4.907341772151899, |
|
"grad_norm": 0.248883925464766, |
|
"learning_rate": 3.2206824010647676e-08, |
|
"loss": 0.0707, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 4.911392405063291, |
|
"grad_norm": 0.24134899640972052, |
|
"learning_rate": 2.9067419540278476e-08, |
|
"loss": 0.0672, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 4.915443037974684, |
|
"grad_norm": 0.2432991469732212, |
|
"learning_rate": 2.6088858215400638e-08, |
|
"loss": 0.0727, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 4.919493670886076, |
|
"grad_norm": 0.2435540506951672, |
|
"learning_rate": 2.3271164024940564e-08, |
|
"loss": 0.0856, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 4.923544303797469, |
|
"grad_norm": 0.2750037625312916, |
|
"learning_rate": 2.061435966221881e-08, |
|
"loss": 0.0667, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 4.927594936708861, |
|
"grad_norm": 0.23914585807274083, |
|
"learning_rate": 1.811846652477245e-08, |
|
"loss": 0.0821, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 4.931645569620253, |
|
"grad_norm": 0.24333913795552436, |
|
"learning_rate": 1.5783504714184106e-08, |
|
"loss": 0.078, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 4.935696202531646, |
|
"grad_norm": 0.2751121547386763, |
|
"learning_rate": 1.360949303591097e-08, |
|
"loss": 0.0627, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 4.939746835443038, |
|
"grad_norm": 0.24279868326694, |
|
"learning_rate": 1.1596448999144916e-08, |
|
"loss": 0.0694, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 4.943797468354431, |
|
"grad_norm": 0.2557733962965861, |
|
"learning_rate": 9.744388816668172e-09, |
|
"loss": 0.0805, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 4.947848101265823, |
|
"grad_norm": 0.24604625508351258, |
|
"learning_rate": 8.05332740472009e-09, |
|
"loss": 0.0796, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 4.951898734177215, |
|
"grad_norm": 0.25229998114954005, |
|
"learning_rate": 6.523278382872811e-09, |
|
"loss": 0.0747, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 4.955949367088608, |
|
"grad_norm": 0.24203772251834726, |
|
"learning_rate": 5.15425407393133e-09, |
|
"loss": 0.0713, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"grad_norm": 0.2472359442957691, |
|
"learning_rate": 3.94626550383137e-09, |
|
"loss": 0.0721, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 4.964050632911392, |
|
"grad_norm": 0.2374484671986443, |
|
"learning_rate": 2.899322401546112e-09, |
|
"loss": 0.0698, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 4.968101265822785, |
|
"grad_norm": 0.2305848011222641, |
|
"learning_rate": 2.013433199010706e-09, |
|
"loss": 0.0754, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 4.972151898734177, |
|
"grad_norm": 0.25427429642271976, |
|
"learning_rate": 1.2886050310556563e-09, |
|
"loss": 0.0811, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 4.976202531645569, |
|
"grad_norm": 0.2324719489263798, |
|
"learning_rate": 7.248437353468695e-10, |
|
"loss": 0.0681, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 4.980253164556962, |
|
"grad_norm": 0.4569630012065991, |
|
"learning_rate": 3.221538523412449e-10, |
|
"loss": 0.0865, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 4.984303797468354, |
|
"grad_norm": 0.2346491824573878, |
|
"learning_rate": 8.053862524670663e-11, |
|
"loss": 0.0656, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 4.988354430379747, |
|
"grad_norm": 0.24049298571584699, |
|
"learning_rate": 0.0, |
|
"loss": 0.0725, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 4.988354430379747, |
|
"step": 1230, |
|
"total_flos": 1.0507661439810929e+18, |
|
"train_loss": 0.23088123145384518, |
|
"train_runtime": 33475.0693, |
|
"train_samples_per_second": 4.72, |
|
"train_steps_per_second": 0.037 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1230, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 5, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.0507661439810929e+18, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|