{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.983291139240507,
  "eval_steps": 500,
  "global_step": 1230,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004050632911392405,
      "grad_norm": 6.954410400383741,
      "learning_rate": 3.2520325203252037e-07,
      "loss": 1.0837,
      "step": 1
    },
    {
      "epoch": 0.00810126582278481,
      "grad_norm": 6.9400501865680555,
      "learning_rate": 6.504065040650407e-07,
      "loss": 1.0654,
      "step": 2
    },
    {
      "epoch": 0.012151898734177215,
      "grad_norm": 6.7987504048943945,
      "learning_rate": 9.75609756097561e-07,
      "loss": 1.0768,
      "step": 3
    },
    {
      "epoch": 0.01620253164556962,
      "grad_norm": 6.653575167921407,
      "learning_rate": 1.3008130081300815e-06,
      "loss": 1.0483,
      "step": 4
    },
    {
      "epoch": 0.020253164556962026,
      "grad_norm": 6.559019603143511,
      "learning_rate": 1.6260162601626018e-06,
      "loss": 1.0553,
      "step": 5
    },
    {
      "epoch": 0.02430379746835443,
      "grad_norm": 6.150383345282052,
      "learning_rate": 1.951219512195122e-06,
      "loss": 1.0362,
      "step": 6
    },
    {
      "epoch": 0.028354430379746835,
      "grad_norm": 5.046946677704143,
      "learning_rate": 2.2764227642276426e-06,
      "loss": 1.0315,
      "step": 7
    },
    {
      "epoch": 0.03240506329113924,
      "grad_norm": 4.548099510649266,
      "learning_rate": 2.601626016260163e-06,
      "loss": 0.997,
      "step": 8
    },
    {
      "epoch": 0.03645569620253165,
      "grad_norm": 3.0248076855242343,
      "learning_rate": 2.926829268292683e-06,
      "loss": 0.9453,
      "step": 9
    },
    {
      "epoch": 0.04050632911392405,
      "grad_norm": 2.764462686471018,
      "learning_rate": 3.2520325203252037e-06,
      "loss": 0.9597,
      "step": 10
    },
    {
      "epoch": 0.044556962025316456,
      "grad_norm": 2.6330409014681746,
      "learning_rate": 3.577235772357724e-06,
      "loss": 0.9552,
      "step": 11
    },
    {
      "epoch": 0.04860759493670886,
      "grad_norm": 3.8043868831683882,
      "learning_rate": 3.902439024390244e-06,
      "loss": 0.9285,
      "step": 12
    },
    {
      "epoch": 0.052658227848101265,
      "grad_norm": 4.119280633506145,
      "learning_rate": 4.227642276422765e-06,
      "loss": 0.9286,
      "step": 13
    },
    {
      "epoch": 0.05670886075949367,
      "grad_norm": 3.806498484710525,
      "learning_rate": 4.552845528455285e-06,
      "loss": 0.914,
      "step": 14
    },
    {
      "epoch": 0.060759493670886074,
      "grad_norm": 3.349101031015638,
      "learning_rate": 4.8780487804878055e-06,
      "loss": 0.9049,
      "step": 15
    },
    {
      "epoch": 0.06481012658227848,
      "grad_norm": 2.457118661708991,
      "learning_rate": 5.203252032520326e-06,
      "loss": 0.8648,
      "step": 16
    },
    {
      "epoch": 0.06886075949367089,
      "grad_norm": 2.395131480833892,
      "learning_rate": 5.528455284552846e-06,
      "loss": 0.8447,
      "step": 17
    },
    {
      "epoch": 0.0729113924050633,
      "grad_norm": 2.027011547926397,
      "learning_rate": 5.853658536585366e-06,
      "loss": 0.8467,
      "step": 18
    },
    {
      "epoch": 0.0769620253164557,
      "grad_norm": 1.5016441178113864,
      "learning_rate": 6.178861788617887e-06,
      "loss": 0.8003,
      "step": 19
    },
    {
      "epoch": 0.0810126582278481,
      "grad_norm": 1.1862156325870468,
      "learning_rate": 6.504065040650407e-06,
      "loss": 0.7855,
      "step": 20
    },
    {
      "epoch": 0.08506329113924051,
      "grad_norm": 1.169651971202369,
      "learning_rate": 6.829268292682928e-06,
      "loss": 0.79,
      "step": 21
    },
    {
      "epoch": 0.08911392405063291,
      "grad_norm": 1.4733903642449016,
      "learning_rate": 7.154471544715448e-06,
      "loss": 0.7689,
      "step": 22
    },
    {
      "epoch": 0.09316455696202532,
      "grad_norm": 1.3007351461160763,
      "learning_rate": 7.4796747967479676e-06,
      "loss": 0.768,
      "step": 23
    },
    {
      "epoch": 0.09721518987341772,
      "grad_norm": 1.0135913590505714,
      "learning_rate": 7.804878048780489e-06,
      "loss": 0.752,
      "step": 24
    },
    {
      "epoch": 0.10126582278481013,
      "grad_norm": 0.9661750296419254,
      "learning_rate": 8.130081300813009e-06,
      "loss": 0.7604,
      "step": 25
    },
    {
      "epoch": 0.10531645569620253,
      "grad_norm": 0.9929522983734735,
      "learning_rate": 8.45528455284553e-06,
      "loss": 0.7456,
      "step": 26
    },
    {
      "epoch": 0.10936708860759493,
      "grad_norm": 0.9956895280529141,
      "learning_rate": 8.78048780487805e-06,
      "loss": 0.7252,
      "step": 27
    },
    {
      "epoch": 0.11341772151898734,
      "grad_norm": 0.9310742583106661,
      "learning_rate": 9.10569105691057e-06,
      "loss": 0.7318,
      "step": 28
    },
    {
      "epoch": 0.11746835443037974,
      "grad_norm": 0.8568515449253671,
      "learning_rate": 9.43089430894309e-06,
      "loss": 0.7202,
      "step": 29
    },
    {
      "epoch": 0.12151898734177215,
      "grad_norm": 0.7882826888909908,
      "learning_rate": 9.756097560975611e-06,
      "loss": 0.7111,
      "step": 30
    },
    {
      "epoch": 0.12556962025316457,
      "grad_norm": 0.7919361910508409,
      "learning_rate": 1.008130081300813e-05,
      "loss": 0.7167,
      "step": 31
    },
    {
      "epoch": 0.12962025316455697,
      "grad_norm": 0.7880585842507666,
      "learning_rate": 1.0406504065040652e-05,
      "loss": 0.7192,
      "step": 32
    },
    {
      "epoch": 0.13367088607594937,
      "grad_norm": 0.7751376967578392,
      "learning_rate": 1.0731707317073172e-05,
      "loss": 0.7028,
      "step": 33
    },
    {
      "epoch": 0.13772151898734178,
      "grad_norm": 0.7543564519537581,
      "learning_rate": 1.1056910569105692e-05,
      "loss": 0.7106,
      "step": 34
    },
    {
      "epoch": 0.14177215189873418,
      "grad_norm": 0.8329757642433349,
      "learning_rate": 1.1382113821138213e-05,
      "loss": 0.7001,
      "step": 35
    },
    {
      "epoch": 0.1458227848101266,
      "grad_norm": 0.7419723430227314,
      "learning_rate": 1.1707317073170731e-05,
      "loss": 0.6805,
      "step": 36
    },
    {
      "epoch": 0.149873417721519,
      "grad_norm": 0.8731532526048719,
      "learning_rate": 1.2032520325203254e-05,
      "loss": 0.6994,
      "step": 37
    },
    {
      "epoch": 0.1539240506329114,
      "grad_norm": 0.7210480525465929,
      "learning_rate": 1.2357723577235774e-05,
      "loss": 0.6962,
      "step": 38
    },
    {
      "epoch": 0.1579746835443038,
      "grad_norm": 0.7976769488728342,
      "learning_rate": 1.2682926829268294e-05,
      "loss": 0.6924,
      "step": 39
    },
    {
      "epoch": 0.1620253164556962,
      "grad_norm": 0.6495276113429324,
      "learning_rate": 1.3008130081300815e-05,
      "loss": 0.6877,
      "step": 40
    },
    {
      "epoch": 0.1660759493670886,
      "grad_norm": 0.7452636694535398,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.6948,
      "step": 41
    },
    {
      "epoch": 0.17012658227848101,
      "grad_norm": 0.8246595290829414,
      "learning_rate": 1.3658536585365855e-05,
      "loss": 0.6855,
      "step": 42
    },
    {
      "epoch": 0.17417721518987342,
      "grad_norm": 0.7894603713964571,
      "learning_rate": 1.3983739837398376e-05,
      "loss": 0.6833,
      "step": 43
    },
    {
      "epoch": 0.17822784810126582,
      "grad_norm": 0.9393220260027626,
      "learning_rate": 1.4308943089430896e-05,
      "loss": 0.6866,
      "step": 44
    },
    {
      "epoch": 0.18227848101265823,
      "grad_norm": 0.7011909671509465,
      "learning_rate": 1.4634146341463415e-05,
      "loss": 0.6859,
      "step": 45
    },
    {
      "epoch": 0.18632911392405063,
      "grad_norm": 0.8425658746356457,
      "learning_rate": 1.4959349593495935e-05,
      "loss": 0.6613,
      "step": 46
    },
    {
      "epoch": 0.19037974683544304,
      "grad_norm": 0.7112503449024806,
      "learning_rate": 1.528455284552846e-05,
      "loss": 0.669,
      "step": 47
    },
    {
      "epoch": 0.19443037974683544,
      "grad_norm": 0.8707079693405161,
      "learning_rate": 1.5609756097560978e-05,
      "loss": 0.6547,
      "step": 48
    },
    {
      "epoch": 0.19848101265822785,
      "grad_norm": 0.7922885285886438,
      "learning_rate": 1.5934959349593496e-05,
      "loss": 0.6857,
      "step": 49
    },
    {
      "epoch": 0.20253164556962025,
      "grad_norm": 0.7671348882310398,
      "learning_rate": 1.6260162601626018e-05,
      "loss": 0.6805,
      "step": 50
    },
    {
      "epoch": 0.20658227848101265,
      "grad_norm": 0.6440900127496116,
      "learning_rate": 1.6585365853658537e-05,
      "loss": 0.6589,
      "step": 51
    },
    {
      "epoch": 0.21063291139240506,
      "grad_norm": 0.7917500664178065,
      "learning_rate": 1.691056910569106e-05,
      "loss": 0.6594,
      "step": 52
    },
    {
      "epoch": 0.21468354430379746,
      "grad_norm": 0.6813330384265905,
      "learning_rate": 1.7235772357723578e-05,
      "loss": 0.6485,
      "step": 53
    },
    {
      "epoch": 0.21873417721518987,
      "grad_norm": 0.6904098970937212,
      "learning_rate": 1.75609756097561e-05,
      "loss": 0.6455,
      "step": 54
    },
    {
      "epoch": 0.22278481012658227,
      "grad_norm": 0.7515573252311591,
      "learning_rate": 1.788617886178862e-05,
      "loss": 0.6488,
      "step": 55
    },
    {
      "epoch": 0.22683544303797468,
      "grad_norm": 0.7250902560938726,
      "learning_rate": 1.821138211382114e-05,
      "loss": 0.6736,
      "step": 56
    },
    {
      "epoch": 0.23088607594936708,
      "grad_norm": 0.7592467420123675,
      "learning_rate": 1.8536585365853663e-05,
      "loss": 0.6677,
      "step": 57
    },
    {
      "epoch": 0.23493670886075949,
      "grad_norm": 0.7319093626699653,
      "learning_rate": 1.886178861788618e-05,
      "loss": 0.6615,
      "step": 58
    },
    {
      "epoch": 0.2389873417721519,
      "grad_norm": 0.941881720722119,
      "learning_rate": 1.91869918699187e-05,
      "loss": 0.6472,
      "step": 59
    },
    {
      "epoch": 0.2430379746835443,
      "grad_norm": 1.2047165847910062,
      "learning_rate": 1.9512195121951222e-05,
      "loss": 0.6479,
      "step": 60
    },
    {
      "epoch": 0.2470886075949367,
      "grad_norm": 0.7321403448119913,
      "learning_rate": 1.983739837398374e-05,
      "loss": 0.6461,
      "step": 61
    },
    {
      "epoch": 0.25113924050632913,
      "grad_norm": 0.9518601971353224,
      "learning_rate": 2.016260162601626e-05,
      "loss": 0.6502,
      "step": 62
    },
    {
      "epoch": 0.25518987341772154,
      "grad_norm": 1.0163651023692077,
      "learning_rate": 2.048780487804878e-05,
      "loss": 0.6602,
      "step": 63
    },
    {
      "epoch": 0.25924050632911394,
      "grad_norm": 0.9169486416046632,
      "learning_rate": 2.0813008130081303e-05,
      "loss": 0.6413,
      "step": 64
    },
    {
      "epoch": 0.26329113924050634,
      "grad_norm": 0.6974057537080287,
      "learning_rate": 2.1138211382113822e-05,
      "loss": 0.6451,
      "step": 65
    },
    {
      "epoch": 0.26734177215189875,
      "grad_norm": 1.1670836784716068,
      "learning_rate": 2.1463414634146344e-05,
      "loss": 0.658,
      "step": 66
    },
    {
      "epoch": 0.27139240506329115,
      "grad_norm": 0.9236023691301253,
      "learning_rate": 2.1788617886178863e-05,
      "loss": 0.6633,
      "step": 67
    },
    {
      "epoch": 0.27544303797468356,
      "grad_norm": 0.9347693368298489,
      "learning_rate": 2.2113821138211385e-05,
      "loss": 0.6339,
      "step": 68
    },
    {
      "epoch": 0.27949367088607596,
      "grad_norm": 1.4303176457391067,
      "learning_rate": 2.2439024390243907e-05,
      "loss": 0.628,
      "step": 69
    },
    {
      "epoch": 0.28354430379746837,
      "grad_norm": 0.9600686276432909,
      "learning_rate": 2.2764227642276426e-05,
      "loss": 0.6424,
      "step": 70
    },
    {
      "epoch": 0.28759493670886077,
      "grad_norm": 1.4317751086843131,
      "learning_rate": 2.3089430894308948e-05,
      "loss": 0.6525,
      "step": 71
    },
    {
      "epoch": 0.2916455696202532,
      "grad_norm": 0.9064738079875657,
      "learning_rate": 2.3414634146341463e-05,
      "loss": 0.6434,
      "step": 72
    },
    {
      "epoch": 0.2956962025316456,
      "grad_norm": 1.486491805755955,
      "learning_rate": 2.3739837398373985e-05,
      "loss": 0.6378,
      "step": 73
    },
    {
      "epoch": 0.299746835443038,
      "grad_norm": 0.982784036935644,
      "learning_rate": 2.4065040650406507e-05,
      "loss": 0.6485,
      "step": 74
    },
    {
      "epoch": 0.3037974683544304,
      "grad_norm": 1.4870922981152221,
      "learning_rate": 2.4390243902439026e-05,
      "loss": 0.6498,
      "step": 75
    },
    {
      "epoch": 0.3078481012658228,
      "grad_norm": 1.1435257808275623,
      "learning_rate": 2.4715447154471548e-05,
      "loss": 0.6425,
      "step": 76
    },
    {
      "epoch": 0.3118987341772152,
      "grad_norm": 1.4789920752305985,
      "learning_rate": 2.5040650406504066e-05,
      "loss": 0.6619,
      "step": 77
    },
    {
      "epoch": 0.3159493670886076,
      "grad_norm": 1.1260615625721069,
      "learning_rate": 2.536585365853659e-05,
      "loss": 0.6461,
      "step": 78
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.4740105074613266,
      "learning_rate": 2.569105691056911e-05,
      "loss": 0.619,
      "step": 79
    },
    {
      "epoch": 0.3240506329113924,
      "grad_norm": 1.3869940427845926,
      "learning_rate": 2.601626016260163e-05,
      "loss": 0.626,
      "step": 80
    },
    {
      "epoch": 0.3281012658227848,
      "grad_norm": 1.3034212101455251,
      "learning_rate": 2.634146341463415e-05,
      "loss": 0.6252,
      "step": 81
    },
    {
      "epoch": 0.3321518987341772,
      "grad_norm": 1.68289411461455,
      "learning_rate": 2.6666666666666667e-05,
      "loss": 0.644,
      "step": 82
    },
    {
      "epoch": 0.3362025316455696,
      "grad_norm": 1.2626834984377235,
      "learning_rate": 2.699186991869919e-05,
      "loss": 0.6414,
      "step": 83
    },
    {
      "epoch": 0.34025316455696203,
      "grad_norm": 1.314289626288198,
      "learning_rate": 2.731707317073171e-05,
      "loss": 0.6482,
      "step": 84
    },
    {
      "epoch": 0.34430379746835443,
      "grad_norm": 1.2491504421527542,
      "learning_rate": 2.764227642276423e-05,
      "loss": 0.6405,
      "step": 85
    },
    {
      "epoch": 0.34835443037974684,
      "grad_norm": 0.8518429377389781,
      "learning_rate": 2.796747967479675e-05,
      "loss": 0.6417,
      "step": 86
    },
    {
      "epoch": 0.35240506329113924,
      "grad_norm": 0.9874957212846749,
      "learning_rate": 2.829268292682927e-05,
      "loss": 0.6309,
      "step": 87
    },
    {
      "epoch": 0.35645569620253165,
      "grad_norm": 1.1199611650893155,
      "learning_rate": 2.8617886178861792e-05,
      "loss": 0.6381,
      "step": 88
    },
    {
      "epoch": 0.36050632911392405,
      "grad_norm": 1.007659922740823,
      "learning_rate": 2.8943089430894314e-05,
      "loss": 0.6193,
      "step": 89
    },
    {
      "epoch": 0.36455696202531646,
      "grad_norm": 0.9381623893482547,
      "learning_rate": 2.926829268292683e-05,
      "loss": 0.6308,
      "step": 90
    },
    {
      "epoch": 0.36860759493670886,
      "grad_norm": 1.4740013444360531,
      "learning_rate": 2.959349593495935e-05,
      "loss": 0.6449,
      "step": 91
    },
    {
      "epoch": 0.37265822784810126,
      "grad_norm": 0.9208216256779698,
      "learning_rate": 2.991869918699187e-05,
      "loss": 0.6179,
      "step": 92
    },
    {
      "epoch": 0.37670886075949367,
      "grad_norm": 1.1077264548000336,
      "learning_rate": 3.0243902439024392e-05,
      "loss": 0.6094,
      "step": 93
    },
    {
      "epoch": 0.3807594936708861,
      "grad_norm": 1.1613227955681877,
      "learning_rate": 3.056910569105692e-05,
      "loss": 0.6278,
      "step": 94
    },
    {
      "epoch": 0.3848101265822785,
      "grad_norm": 1.3362050082614625,
      "learning_rate": 3.089430894308943e-05,
      "loss": 0.6256,
      "step": 95
    },
    {
      "epoch": 0.3888607594936709,
      "grad_norm": 0.9076979750116068,
      "learning_rate": 3.1219512195121955e-05,
      "loss": 0.6301,
      "step": 96
    },
    {
      "epoch": 0.3929113924050633,
      "grad_norm": 1.361369247370263,
      "learning_rate": 3.154471544715447e-05,
      "loss": 0.6333,
      "step": 97
    },
    {
      "epoch": 0.3969620253164557,
      "grad_norm": 0.9358892842812995,
      "learning_rate": 3.186991869918699e-05,
      "loss": 0.6539,
      "step": 98
    },
    {
      "epoch": 0.4010126582278481,
      "grad_norm": 1.2380687982341076,
      "learning_rate": 3.2195121951219514e-05,
      "loss": 0.6374,
      "step": 99
    },
    {
      "epoch": 0.4050632911392405,
      "grad_norm": 1.5932098618922832,
      "learning_rate": 3.2520325203252037e-05,
      "loss": 0.6381,
      "step": 100
    },
    {
      "epoch": 0.4091139240506329,
      "grad_norm": 1.0245121892036138,
      "learning_rate": 3.284552845528456e-05,
      "loss": 0.6392,
      "step": 101
    },
    {
      "epoch": 0.4131645569620253,
      "grad_norm": 1.5607390147240106,
      "learning_rate": 3.3170731707317074e-05,
      "loss": 0.6328,
      "step": 102
    },
    {
      "epoch": 0.4172151898734177,
      "grad_norm": 1.6175403214791868,
      "learning_rate": 3.3495934959349596e-05,
      "loss": 0.6378,
      "step": 103
    },
    {
      "epoch": 0.4212658227848101,
      "grad_norm": 1.0931575136863096,
      "learning_rate": 3.382113821138212e-05,
      "loss": 0.6309,
      "step": 104
    },
    {
      "epoch": 0.4253164556962025,
      "grad_norm": 2.04553597292404,
      "learning_rate": 3.414634146341463e-05,
      "loss": 0.6295,
      "step": 105
    },
    {
      "epoch": 0.4293670886075949,
      "grad_norm": 1.3641814475366492,
      "learning_rate": 3.4471544715447155e-05,
      "loss": 0.6208,
      "step": 106
    },
    {
      "epoch": 0.43341772151898733,
      "grad_norm": 2.175539120577516,
      "learning_rate": 3.479674796747968e-05,
      "loss": 0.6454,
      "step": 107
    },
    {
      "epoch": 0.43746835443037974,
      "grad_norm": 1.7624187916450553,
      "learning_rate": 3.51219512195122e-05,
      "loss": 0.6326,
      "step": 108
    },
    {
      "epoch": 0.44151898734177214,
      "grad_norm": 2.1079482503983638,
      "learning_rate": 3.544715447154472e-05,
      "loss": 0.6357,
      "step": 109
    },
    {
      "epoch": 0.44556962025316454,
      "grad_norm": 1.914026520079883,
      "learning_rate": 3.577235772357724e-05,
      "loss": 0.6439,
      "step": 110
    },
    {
      "epoch": 0.44962025316455695,
      "grad_norm": 1.860470142894017,
      "learning_rate": 3.609756097560976e-05,
      "loss": 0.6276,
      "step": 111
    },
    {
      "epoch": 0.45367088607594935,
      "grad_norm": 2.0408716526069677,
      "learning_rate": 3.642276422764228e-05,
      "loss": 0.6291,
      "step": 112
    },
    {
      "epoch": 0.45772151898734176,
      "grad_norm": 1.7820366220310464,
      "learning_rate": 3.67479674796748e-05,
      "loss": 0.6185,
      "step": 113
    },
    {
      "epoch": 0.46177215189873416,
      "grad_norm": 1.7523377683295287,
      "learning_rate": 3.7073170731707325e-05,
      "loss": 0.6338,
      "step": 114
    },
    {
      "epoch": 0.46582278481012657,
      "grad_norm": 1.7615120198101528,
      "learning_rate": 3.739837398373984e-05,
      "loss": 0.6336,
      "step": 115
    },
    {
      "epoch": 0.46987341772151897,
      "grad_norm": 1.2723732298454176,
      "learning_rate": 3.772357723577236e-05,
      "loss": 0.6174,
      "step": 116
    },
    {
      "epoch": 0.4739240506329114,
      "grad_norm": 2.4191326943774607,
      "learning_rate": 3.804878048780488e-05,
      "loss": 0.6313,
      "step": 117
    },
    {
      "epoch": 0.4779746835443038,
      "grad_norm": 1.8328826567416288,
      "learning_rate": 3.83739837398374e-05,
      "loss": 0.6292,
      "step": 118
    },
    {
      "epoch": 0.4820253164556962,
      "grad_norm": 2.1116309682757213,
      "learning_rate": 3.869918699186992e-05,
      "loss": 0.6333,
      "step": 119
    },
    {
      "epoch": 0.4860759493670886,
      "grad_norm": 1.9680226888243344,
      "learning_rate": 3.9024390243902444e-05,
      "loss": 0.625,
      "step": 120
    },
    {
      "epoch": 0.490126582278481,
      "grad_norm": 2.0494773164220157,
      "learning_rate": 3.9349593495934966e-05,
      "loss": 0.6229,
      "step": 121
    },
    {
      "epoch": 0.4941772151898734,
      "grad_norm": 1.5500534822035332,
      "learning_rate": 3.967479674796748e-05,
      "loss": 0.6175,
      "step": 122
    },
    {
      "epoch": 0.4982278481012658,
      "grad_norm": 1.9139244213929787,
      "learning_rate": 4e-05,
      "loss": 0.6168,
      "step": 123
    },
    {
      "epoch": 0.5022784810126583,
      "grad_norm": 1.5010358406351174,
      "learning_rate": 3.999991946137476e-05,
      "loss": 0.6193,
      "step": 124
    },
    {
      "epoch": 0.5063291139240507,
      "grad_norm": 2.0321580775349424,
      "learning_rate": 3.999967784614766e-05,
      "loss": 0.5987,
      "step": 125
    },
    {
      "epoch": 0.5103797468354431,
      "grad_norm": 1.951754324964686,
      "learning_rate": 3.9999275156264656e-05,
      "loss": 0.6344,
      "step": 126
    },
    {
      "epoch": 0.5144303797468355,
      "grad_norm": 1.2377681100408082,
      "learning_rate": 3.999871139496895e-05,
      "loss": 0.6133,
      "step": 127
    },
    {
      "epoch": 0.5184810126582279,
      "grad_norm": 1.1851428210674686,
      "learning_rate": 3.9997986566800995e-05,
      "loss": 0.6401,
      "step": 128
    },
    {
      "epoch": 0.5225316455696203,
      "grad_norm": 1.2842171163259886,
      "learning_rate": 3.999710067759846e-05,
      "loss": 0.621,
      "step": 129
    },
    {
      "epoch": 0.5265822784810127,
      "grad_norm": 0.925367595604473,
      "learning_rate": 3.999605373449617e-05,
      "loss": 0.6202,
      "step": 130
    },
    {
      "epoch": 0.5306329113924051,
      "grad_norm": 1.6400196345433515,
      "learning_rate": 3.9994845745926075e-05,
      "loss": 0.5935,
      "step": 131
    },
    {
      "epoch": 0.5346835443037975,
      "grad_norm": 0.9476606871594433,
      "learning_rate": 3.999347672161713e-05,
      "loss": 0.6134,
      "step": 132
    },
    {
      "epoch": 0.5387341772151899,
      "grad_norm": 1.9840007533590123,
      "learning_rate": 3.999194667259528e-05,
      "loss": 0.6175,
      "step": 133
    },
    {
      "epoch": 0.5427848101265823,
      "grad_norm": 1.3088630473639644,
      "learning_rate": 3.999025561118334e-05,
      "loss": 0.6253,
      "step": 134
    },
    {
      "epoch": 0.5468354430379747,
      "grad_norm": 1.9717992678008205,
      "learning_rate": 3.998840355100086e-05,
      "loss": 0.6057,
      "step": 135
    },
    {
      "epoch": 0.5508860759493671,
      "grad_norm": 1.5907043418300266,
      "learning_rate": 3.998639050696409e-05,
      "loss": 0.6493,
      "step": 136
    },
    {
      "epoch": 0.5549367088607595,
      "grad_norm": 2.046187139082419,
      "learning_rate": 3.998421649528582e-05,
      "loss": 0.6203,
      "step": 137
    },
    {
      "epoch": 0.5589873417721519,
      "grad_norm": 2.0441984178882873,
      "learning_rate": 3.9981881533475234e-05,
      "loss": 0.6335,
      "step": 138
    },
    {
      "epoch": 0.5630379746835443,
      "grad_norm": 1.3247673726272031,
      "learning_rate": 3.997938564033779e-05,
      "loss": 0.6201,
      "step": 139
    },
    {
      "epoch": 0.5670886075949367,
      "grad_norm": 1.6008636317180505,
      "learning_rate": 3.9976728835975064e-05,
      "loss": 0.6442,
      "step": 140
    },
    {
      "epoch": 0.5711392405063291,
      "grad_norm": 1.3132785590896519,
      "learning_rate": 3.9973911141784605e-05,
      "loss": 0.6228,
      "step": 141
    },
    {
      "epoch": 0.5751898734177215,
      "grad_norm": 1.0799649469045405,
      "learning_rate": 3.997093258045973e-05,
      "loss": 0.6092,
      "step": 142
    },
    {
      "epoch": 0.579240506329114,
      "grad_norm": 1.3084117951246697,
      "learning_rate": 3.996779317598936e-05,
      "loss": 0.6187,
      "step": 143
    },
    {
      "epoch": 0.5832911392405064,
      "grad_norm": 1.3040702214012478,
      "learning_rate": 3.996449295365782e-05,
      "loss": 0.6332,
      "step": 144
    },
    {
      "epoch": 0.5873417721518988,
      "grad_norm": 1.3246849971775365,
      "learning_rate": 3.996103194004467e-05,
      "loss": 0.6223,
      "step": 145
    },
    {
      "epoch": 0.5913924050632912,
      "grad_norm": 1.1133453711361432,
      "learning_rate": 3.995741016302441e-05,
      "loss": 0.6203,
      "step": 146
    },
    {
      "epoch": 0.5954430379746836,
      "grad_norm": 1.5212733020399984,
      "learning_rate": 3.9953627651766364e-05,
      "loss": 0.64,
      "step": 147
    },
    {
      "epoch": 0.599493670886076,
      "grad_norm": 1.1918500074346965,
      "learning_rate": 3.9949684436734325e-05,
      "loss": 0.6175,
      "step": 148
    },
    {
      "epoch": 0.6035443037974684,
      "grad_norm": 1.3304436223181861,
      "learning_rate": 3.994558054968643e-05,
      "loss": 0.6205,
      "step": 149
    },
    {
      "epoch": 0.6075949367088608,
      "grad_norm": 1.389173548371737,
      "learning_rate": 3.994131602367481e-05,
      "loss": 0.6347,
      "step": 150
    },
    {
      "epoch": 0.6116455696202532,
      "grad_norm": 1.2711928984462,
      "learning_rate": 3.9936890893045376e-05,
      "loss": 0.6027,
      "step": 151
    },
    {
      "epoch": 0.6156962025316456,
      "grad_norm": 1.168692041661893,
      "learning_rate": 3.993230519343752e-05,
      "loss": 0.6198,
      "step": 152
    },
    {
      "epoch": 0.619746835443038,
      "grad_norm": 1.3976572786522525,
      "learning_rate": 3.992755896178383e-05,
      "loss": 0.601,
      "step": 153
    },
    {
      "epoch": 0.6237974683544304,
      "grad_norm": 0.9693732962843065,
      "learning_rate": 3.992265223630981e-05,
      "loss": 0.6033,
      "step": 154
    },
    {
      "epoch": 0.6278481012658228,
      "grad_norm": 1.2517932069381732,
      "learning_rate": 3.991758505653355e-05,
      "loss": 0.618,
      "step": 155
    },
    {
      "epoch": 0.6318987341772152,
      "grad_norm": 0.9431714844722686,
      "learning_rate": 3.991235746326543e-05,
      "loss": 0.6088,
      "step": 156
    },
    {
      "epoch": 0.6359493670886076,
      "grad_norm": 1.1337488036130554,
      "learning_rate": 3.9906969498607745e-05,
      "loss": 0.6268,
      "step": 157
    },
    {
      "epoch": 0.64,
      "grad_norm": 1.2372875141603026,
      "learning_rate": 3.990142120595444e-05,
      "loss": 0.6285,
      "step": 158
    },
    {
      "epoch": 0.6440506329113924,
      "grad_norm": 1.011131049183032,
      "learning_rate": 3.98957126299907e-05,
      "loss": 0.6127,
      "step": 159
    },
    {
      "epoch": 0.6481012658227848,
      "grad_norm": 1.3097453136267005,
      "learning_rate": 3.9889843816692596e-05,
      "loss": 0.611,
      "step": 160
    },
    {
      "epoch": 0.6521518987341772,
      "grad_norm": 1.0564549665367275,
      "learning_rate": 3.9883814813326766e-05,
      "loss": 0.5992,
      "step": 161
    },
    {
      "epoch": 0.6562025316455696,
      "grad_norm": 0.9769830856598268,
      "learning_rate": 3.9877625668449956e-05,
      "loss": 0.6081,
      "step": 162
    },
    {
      "epoch": 0.660253164556962,
      "grad_norm": 1.1099181496381898,
      "learning_rate": 3.98712764319087e-05,
      "loss": 0.6224,
      "step": 163
    },
    {
      "epoch": 0.6643037974683544,
      "grad_norm": 1.2987698425103593,
      "learning_rate": 3.9864767154838864e-05,
      "loss": 0.6248,
      "step": 164
    },
    {
      "epoch": 0.6683544303797468,
      "grad_norm": 0.8777740399672798,
      "learning_rate": 3.9858097889665277e-05,
      "loss": 0.6043,
      "step": 165
    },
    {
      "epoch": 0.6724050632911392,
      "grad_norm": 1.5400498813956358,
      "learning_rate": 3.985126869010129e-05,
      "loss": 0.6114,
      "step": 166
    },
    {
      "epoch": 0.6764556962025317,
      "grad_norm": 0.9996587995756404,
      "learning_rate": 3.984427961114833e-05,
      "loss": 0.6217,
      "step": 167
    },
    {
      "epoch": 0.6805063291139241,
      "grad_norm": 1.9694227759225438,
      "learning_rate": 3.9837130709095475e-05,
      "loss": 0.6134,
      "step": 168
    },
    {
      "epoch": 0.6845569620253165,
      "grad_norm": 1.3955738162465008,
      "learning_rate": 3.982982204151901e-05,
      "loss": 0.6191,
      "step": 169
    },
    {
      "epoch": 0.6886075949367089,
      "grad_norm": 1.821718015690657,
      "learning_rate": 3.982235366728193e-05,
      "loss": 0.6228,
      "step": 170
    },
    {
      "epoch": 0.6926582278481013,
      "grad_norm": 1.5717503184251056,
      "learning_rate": 3.9814725646533505e-05,
      "loss": 0.6222,
      "step": 171
    },
    {
      "epoch": 0.6967088607594937,
      "grad_norm": 1.6115042694426303,
      "learning_rate": 3.9806938040708746e-05,
      "loss": 0.6348,
      "step": 172
    },
    {
      "epoch": 0.7007594936708861,
      "grad_norm": 1.4286086413149055,
      "learning_rate": 3.9798990912527976e-05,
      "loss": 0.6101,
      "step": 173
    },
    {
      "epoch": 0.7048101265822785,
      "grad_norm": 1.5349889681249824,
      "learning_rate": 3.979088432599627e-05,
      "loss": 0.6291,
      "step": 174
    },
    {
      "epoch": 0.7088607594936709,
      "grad_norm": 1.4745467264198815,
      "learning_rate": 3.9782618346402964e-05,
      "loss": 0.625,
      "step": 175
    },
    {
      "epoch": 0.7129113924050633,
      "grad_norm": 1.2103984621444766,
      "learning_rate": 3.977419304032111e-05,
      "loss": 0.6064,
      "step": 176
    },
    {
      "epoch": 0.7169620253164557,
      "grad_norm": 1.1122496454477153,
      "learning_rate": 3.976560847560697e-05,
      "loss": 0.612,
      "step": 177
    },
    {
      "epoch": 0.7210126582278481,
      "grad_norm": 1.400871717877839,
      "learning_rate": 3.9756864721399456e-05,
      "loss": 0.6364,
      "step": 178
    },
    {
      "epoch": 0.7250632911392405,
      "grad_norm": 1.0480487558870482,
      "learning_rate": 3.974796184811956e-05,
      "loss": 0.6073,
      "step": 179
    },
    {
      "epoch": 0.7291139240506329,
      "grad_norm": 1.6399720785424263,
      "learning_rate": 3.973889992746979e-05,
      "loss": 0.6253,
      "step": 180
    },
    {
      "epoch": 0.7331645569620253,
      "grad_norm": 1.1487687074885622,
      "learning_rate": 3.972967903243361e-05,
      "loss": 0.6078,
      "step": 181
    },
    {
      "epoch": 0.7372151898734177,
      "grad_norm": 1.7256647191494647,
      "learning_rate": 3.972029923727486e-05,
      "loss": 0.606,
      "step": 182
    },
    {
      "epoch": 0.7412658227848101,
      "grad_norm": 1.4815467405829454,
      "learning_rate": 3.971076061753709e-05,
      "loss": 0.6342,
      "step": 183
    },
    {
      "epoch": 0.7453164556962025,
      "grad_norm": 1.5456781459884399,
      "learning_rate": 3.9701063250043066e-05,
      "loss": 0.6149,
      "step": 184
    },
    {
      "epoch": 0.7493670886075949,
      "grad_norm": 1.42055328929477,
      "learning_rate": 3.969120721289402e-05,
      "loss": 0.6252,
      "step": 185
    },
    {
      "epoch": 0.7534177215189873,
      "grad_norm": 1.164121811609642,
      "learning_rate": 3.9681192585469146e-05,
      "loss": 0.608,
      "step": 186
    },
    {
      "epoch": 0.7574683544303797,
      "grad_norm": 1.2381164899030535,
      "learning_rate": 3.9671019448424865e-05,
      "loss": 0.5928,
      "step": 187
    },
    {
      "epoch": 0.7615189873417721,
      "grad_norm": 0.9967579966414553,
      "learning_rate": 3.966068788369422e-05,
      "loss": 0.5917,
      "step": 188
    },
    {
      "epoch": 0.7655696202531646,
      "grad_norm": 1.1090706469905602,
      "learning_rate": 3.965019797448622e-05,
      "loss": 0.6033,
      "step": 189
    },
    {
      "epoch": 0.769620253164557,
      "grad_norm": 1.1198692890397965,
      "learning_rate": 3.963954980528515e-05,
      "loss": 0.6181,
      "step": 190
    },
    {
      "epoch": 0.7736708860759494,
      "grad_norm": 1.3766137535756369,
      "learning_rate": 3.9628743461849905e-05,
      "loss": 0.6123,
      "step": 191
    },
    {
      "epoch": 0.7777215189873418,
      "grad_norm": 1.2633971487281341,
      "learning_rate": 3.961777903121329e-05,
      "loss": 0.6192,
      "step": 192
    },
    {
      "epoch": 0.7817721518987342,
      "grad_norm": 1.1388142275182511,
      "learning_rate": 3.960665660168131e-05,
      "loss": 0.6125,
      "step": 193
    },
    {
      "epoch": 0.7858227848101266,
      "grad_norm": 1.2922040088889029,
      "learning_rate": 3.9595376262832485e-05,
      "loss": 0.613,
      "step": 194
    },
    {
      "epoch": 0.789873417721519,
      "grad_norm": 0.9544800492686206,
      "learning_rate": 3.9583938105517127e-05,
      "loss": 0.5829,
      "step": 195
    },
    {
      "epoch": 0.7939240506329114,
      "grad_norm": 1.309587387971099,
      "learning_rate": 3.957234222185657e-05,
      "loss": 0.6062,
      "step": 196
    },
    {
      "epoch": 0.7979746835443038,
      "grad_norm": 0.927450669490755,
      "learning_rate": 3.9560588705242474e-05,
      "loss": 0.6065,
      "step": 197
    },
    {
      "epoch": 0.8020253164556962,
      "grad_norm": 1.4028833209782143,
      "learning_rate": 3.954867765033605e-05,
      "loss": 0.6117,
      "step": 198
    },
    {
      "epoch": 0.8060759493670886,
      "grad_norm": 1.1388047255444957,
      "learning_rate": 3.953660915306728e-05,
      "loss": 0.6081,
      "step": 199
    },
    {
      "epoch": 0.810126582278481,
      "grad_norm": 1.1534986291069729,
      "learning_rate": 3.952438331063419e-05,
      "loss": 0.6116,
      "step": 200
    },
    {
      "epoch": 0.8141772151898734,
      "grad_norm": 1.1732865302072821,
      "learning_rate": 3.951200022150205e-05,
      "loss": 0.606,
      "step": 201
    },
    {
      "epoch": 0.8182278481012658,
      "grad_norm": 0.8330794708119093,
      "learning_rate": 3.949945998540253e-05,
      "loss": 0.6098,
      "step": 202
    },
    {
      "epoch": 0.8222784810126582,
      "grad_norm": 1.2780310020827839,
      "learning_rate": 3.9486762703332993e-05,
      "loss": 0.6106,
      "step": 203
    },
    {
      "epoch": 0.8263291139240506,
      "grad_norm": 0.9031986914771571,
      "learning_rate": 3.947390847755559e-05,
      "loss": 0.6131,
      "step": 204
    },
    {
      "epoch": 0.830379746835443,
      "grad_norm": 1.1094640162127052,
      "learning_rate": 3.946089741159648e-05,
      "loss": 0.6079,
      "step": 205
    },
    {
      "epoch": 0.8344303797468354,
      "grad_norm": 1.0629140797411722,
      "learning_rate": 3.944772961024501e-05,
      "loss": 0.6214,
      "step": 206
    },
    {
      "epoch": 0.8384810126582278,
      "grad_norm": 0.8947263141622616,
      "learning_rate": 3.943440517955285e-05,
      "loss": 0.6129,
      "step": 207
    },
    {
      "epoch": 0.8425316455696202,
      "grad_norm": 1.070950132163622,
      "learning_rate": 3.9420924226833126e-05,
      "loss": 0.6156,
      "step": 208
    },
    {
      "epoch": 0.8465822784810126,
      "grad_norm": 0.7313141296035833,
      "learning_rate": 3.9407286860659566e-05,
      "loss": 0.6035,
      "step": 209
    },
    {
      "epoch": 0.850632911392405,
      "grad_norm": 1.0621208232698145,
      "learning_rate": 3.9393493190865657e-05,
      "loss": 0.6125,
      "step": 210
    },
    {
      "epoch": 0.8546835443037974,
      "grad_norm": 0.8297601109673262,
      "learning_rate": 3.937954332854371e-05,
      "loss": 0.603,
      "step": 211
    },
    {
      "epoch": 0.8587341772151899,
      "grad_norm": 1.0842415166426773,
      "learning_rate": 3.9365437386044016e-05,
      "loss": 0.6245,
      "step": 212
    },
    {
      "epoch": 0.8627848101265823,
      "grad_norm": 0.9406477756620928,
      "learning_rate": 3.935117547697387e-05,
      "loss": 0.5952,
      "step": 213
    },
    {
      "epoch": 0.8668354430379747,
      "grad_norm": 0.957964930398037,
      "learning_rate": 3.933675771619675e-05,
      "loss": 0.5975,
      "step": 214
    },
    {
      "epoch": 0.8708860759493671,
      "grad_norm": 1.2555652779638378,
      "learning_rate": 3.932218421983131e-05,
      "loss": 0.6084,
      "step": 215
    },
    {
      "epoch": 0.8749367088607595,
      "grad_norm": 0.86878070978041,
      "learning_rate": 3.9307455105250484e-05,
      "loss": 0.6243,
      "step": 216
    },
    {
      "epoch": 0.8789873417721519,
      "grad_norm": 1.0244995141168223,
      "learning_rate": 3.929257049108054e-05,
      "loss": 0.6062,
      "step": 217
    },
    {
      "epoch": 0.8830379746835443,
      "grad_norm": 1.443860249427533,
      "learning_rate": 3.927753049720011e-05,
      "loss": 0.5874,
      "step": 218
    },
    {
      "epoch": 0.8870886075949367,
      "grad_norm": 0.8663628967787877,
      "learning_rate": 3.9262335244739234e-05,
      "loss": 0.5945,
      "step": 219
    },
    {
      "epoch": 0.8911392405063291,
      "grad_norm": 1.221101240064337,
      "learning_rate": 3.92469848560784e-05,
      "loss": 0.5959,
      "step": 220
    },
    {
      "epoch": 0.8951898734177215,
      "grad_norm": 0.8605157920856649,
      "learning_rate": 3.923147945484751e-05,
      "loss": 0.5846,
      "step": 221
    },
    {
      "epoch": 0.8992405063291139,
      "grad_norm": 0.8378182215296612,
      "learning_rate": 3.9215819165924956e-05,
      "loss": 0.6093,
      "step": 222
    },
    {
      "epoch": 0.9032911392405063,
      "grad_norm": 0.6404433781517037,
      "learning_rate": 3.920000411543654e-05,
      "loss": 0.6063,
      "step": 223
    },
    {
      "epoch": 0.9073417721518987,
      "grad_norm": 1.0888078181364826,
      "learning_rate": 3.9184034430754495e-05,
      "loss": 0.6158,
      "step": 224
    },
    {
      "epoch": 0.9113924050632911,
      "grad_norm": 0.8422213424676048,
      "learning_rate": 3.916791024049648e-05,
      "loss": 0.6101,
      "step": 225
    },
    {
      "epoch": 0.9154430379746835,
      "grad_norm": 0.7957737135782372,
      "learning_rate": 3.91516316745245e-05,
      "loss": 0.6209,
      "step": 226
    },
    {
      "epoch": 0.9194936708860759,
      "grad_norm": 1.0194480164148838,
      "learning_rate": 3.913519886394389e-05,
      "loss": 0.6061,
      "step": 227
    },
    {
      "epoch": 0.9235443037974683,
      "grad_norm": 1.3466777738047577,
      "learning_rate": 3.911861194110225e-05,
      "loss": 0.6025,
      "step": 228
    },
    {
      "epoch": 0.9275949367088607,
      "grad_norm": 0.9660390241969244,
      "learning_rate": 3.910187103958837e-05,
      "loss": 0.6102,
      "step": 229
    },
    {
      "epoch": 0.9316455696202531,
      "grad_norm": 1.2649901891365827,
      "learning_rate": 3.908497629423117e-05,
      "loss": 0.6044,
      "step": 230
    },
    {
      "epoch": 0.9356962025316455,
      "grad_norm": 1.1416037966870898,
      "learning_rate": 3.9067927841098614e-05,
      "loss": 0.6104,
      "step": 231
    },
    {
      "epoch": 0.9397468354430379,
      "grad_norm": 0.8753199532251423,
      "learning_rate": 3.9050725817496594e-05,
      "loss": 0.5985,
      "step": 232
    },
    {
      "epoch": 0.9437974683544303,
      "grad_norm": 0.977295084131597,
      "learning_rate": 3.9033370361967844e-05,
      "loss": 0.6156,
      "step": 233
    },
    {
      "epoch": 0.9478481012658228,
      "grad_norm": 0.751475625724539,
      "learning_rate": 3.901586161429081e-05,
      "loss": 0.596,
      "step": 234
    },
    {
      "epoch": 0.9518987341772152,
      "grad_norm": 0.8425923067611041,
      "learning_rate": 3.8998199715478545e-05,
      "loss": 0.6148,
      "step": 235
    },
    {
      "epoch": 0.9559493670886076,
      "grad_norm": 0.9775990078269458,
      "learning_rate": 3.8980384807777564e-05,
      "loss": 0.6082,
      "step": 236
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.8953733267050219,
      "learning_rate": 3.896241703466667e-05,
      "loss": 0.6011,
      "step": 237
    },
    {
      "epoch": 0.9640506329113924,
      "grad_norm": 1.0266294493342953,
      "learning_rate": 3.894429654085585e-05,
      "loss": 0.5998,
      "step": 238
    },
    {
      "epoch": 0.9681012658227848,
      "grad_norm": 0.9750240426544625,
      "learning_rate": 3.892602347228505e-05,
      "loss": 0.5913,
      "step": 239
    },
    {
      "epoch": 0.9721518987341772,
      "grad_norm": 0.8308003291204458,
      "learning_rate": 3.890759797612307e-05,
      "loss": 0.5951,
      "step": 240
    },
    {
      "epoch": 0.9762025316455696,
      "grad_norm": 0.986076181883239,
      "learning_rate": 3.888902020076632e-05,
      "loss": 0.6039,
      "step": 241
    },
    {
      "epoch": 0.980253164556962,
      "grad_norm": 1.2635656103741617,
      "learning_rate": 3.887029029583764e-05,
      "loss": 0.5956,
      "step": 242
    },
    {
      "epoch": 0.9843037974683544,
      "grad_norm": 0.9455852149981113,
      "learning_rate": 3.8851408412185125e-05,
      "loss": 0.5861,
      "step": 243
    },
    {
      "epoch": 0.9883544303797468,
      "grad_norm": 1.0584159324395885,
      "learning_rate": 3.8832374701880855e-05,
      "loss": 0.6132,
      "step": 244
    },
    {
      "epoch": 0.9924050632911392,
      "grad_norm": 1.0811629911718001,
      "learning_rate": 3.881318931821972e-05,
      "loss": 0.5917,
      "step": 245
    },
    {
      "epoch": 0.9964556962025316,
      "grad_norm": 0.9477403399860244,
      "learning_rate": 3.879385241571817e-05,
      "loss": 0.6071,
      "step": 246
    },
    {
      "epoch": 1.0007594936708861,
      "grad_norm": 1.2436834332179856,
      "learning_rate": 3.8774364150112955e-05,
      "loss": 0.7038,
      "step": 247
    },
    {
      "epoch": 1.0048101265822784,
      "grad_norm": 0.9638437861566009,
      "learning_rate": 3.8754724678359884e-05,
      "loss": 0.5592,
      "step": 248
    },
    {
      "epoch": 1.008860759493671,
      "grad_norm": 1.0195205462687125,
      "learning_rate": 3.873493415863256e-05,
      "loss": 0.5238,
      "step": 249
    },
    {
      "epoch": 1.0129113924050632,
      "grad_norm": 0.8763884395184933,
      "learning_rate": 3.871499275032111e-05,
      "loss": 0.5256,
      "step": 250
    },
    {
      "epoch": 1.0169620253164557,
      "grad_norm": 1.2454463268794869,
      "learning_rate": 3.869490061403091e-05,
      "loss": 0.5769,
      "step": 251
    },
    {
      "epoch": 1.021012658227848,
      "grad_norm": 0.8986589153450555,
      "learning_rate": 3.867465791158124e-05,
      "loss": 0.5288,
      "step": 252
    },
    {
      "epoch": 1.0250632911392406,
      "grad_norm": 0.8518925757174307,
      "learning_rate": 3.865426480600407e-05,
      "loss": 0.4986,
      "step": 253
    },
    {
      "epoch": 1.0291139240506328,
      "grad_norm": 1.348489504928881,
      "learning_rate": 3.863372146154264e-05,
      "loss": 0.5607,
      "step": 254
    },
    {
      "epoch": 1.0331645569620254,
      "grad_norm": 0.7634655371568729,
      "learning_rate": 3.861302804365024e-05,
      "loss": 0.5261,
      "step": 255
    },
    {
      "epoch": 1.0372151898734177,
      "grad_norm": 1.1094197341691527,
      "learning_rate": 3.85921847189888e-05,
      "loss": 0.5128,
      "step": 256
    },
    {
      "epoch": 1.0412658227848102,
      "grad_norm": 0.8771192758331756,
      "learning_rate": 3.85711916554276e-05,
      "loss": 0.5359,
      "step": 257
    },
    {
      "epoch": 1.0453164556962025,
      "grad_norm": 0.9501647060341649,
      "learning_rate": 3.85500490220419e-05,
      "loss": 0.5477,
      "step": 258
    },
    {
      "epoch": 1.049367088607595,
      "grad_norm": 1.1602003663008746,
      "learning_rate": 3.852875698911154e-05,
      "loss": 0.5152,
      "step": 259
    },
    {
      "epoch": 1.0534177215189873,
      "grad_norm": 0.9029510647636,
      "learning_rate": 3.850731572811963e-05,
      "loss": 0.5615,
      "step": 260
    },
    {
      "epoch": 1.0574683544303798,
      "grad_norm": 1.0403602720169456,
      "learning_rate": 3.848572541175116e-05,
      "loss": 0.528,
      "step": 261
    },
    {
      "epoch": 1.061518987341772,
      "grad_norm": 1.131859261218781,
      "learning_rate": 3.846398621389154e-05,
      "loss": 0.523,
      "step": 262
    },
    {
      "epoch": 1.0655696202531646,
      "grad_norm": 0.8401834424919907,
      "learning_rate": 3.84420983096253e-05,
      "loss": 0.5322,
      "step": 263
    },
    {
      "epoch": 1.0696202531645569,
      "grad_norm": 0.7309117877766685,
      "learning_rate": 3.8420061875234606e-05,
      "loss": 0.5351,
      "step": 264
    },
    {
      "epoch": 1.0736708860759494,
      "grad_norm": 0.8503214419644856,
      "learning_rate": 3.839787708819787e-05,
      "loss": 0.5494,
      "step": 265
    },
    {
      "epoch": 1.0777215189873417,
      "grad_norm": 0.6846960317707459,
      "learning_rate": 3.8375544127188325e-05,
      "loss": 0.5344,
      "step": 266
    },
    {
      "epoch": 1.0817721518987342,
      "grad_norm": 0.8696235595377376,
      "learning_rate": 3.8353063172072564e-05,
      "loss": 0.5648,
      "step": 267
    },
    {
      "epoch": 1.0858227848101265,
      "grad_norm": 0.6852078471349666,
      "learning_rate": 3.8330434403909105e-05,
      "loss": 0.4944,
      "step": 268
    },
    {
      "epoch": 1.089873417721519,
      "grad_norm": 0.9165859207315454,
      "learning_rate": 3.8307658004946934e-05,
      "loss": 0.5257,
      "step": 269
    },
    {
      "epoch": 1.0939240506329113,
      "grad_norm": 1.1448029066857075,
      "learning_rate": 3.8284734158624046e-05,
      "loss": 0.5308,
      "step": 270
    },
    {
      "epoch": 1.0979746835443038,
      "grad_norm": 0.7093320877956338,
      "learning_rate": 3.826166304956594e-05,
      "loss": 0.528,
      "step": 271
    },
    {
      "epoch": 1.1020253164556961,
      "grad_norm": 1.3304757380361536,
      "learning_rate": 3.8238444863584164e-05,
      "loss": 0.546,
      "step": 272
    },
    {
      "epoch": 1.1060759493670886,
      "grad_norm": 0.8743743449729098,
      "learning_rate": 3.821507978767479e-05,
      "loss": 0.5049,
      "step": 273
    },
    {
      "epoch": 1.110126582278481,
      "grad_norm": 0.92894133297133,
      "learning_rate": 3.819156801001693e-05,
      "loss": 0.5172,
      "step": 274
    },
    {
      "epoch": 1.1141772151898734,
      "grad_norm": 0.9893456300412816,
      "learning_rate": 3.816790971997121e-05,
      "loss": 0.5424,
      "step": 275
    },
    {
      "epoch": 1.1182278481012657,
      "grad_norm": 0.9801139239828293,
      "learning_rate": 3.8144105108078246e-05,
      "loss": 0.5403,
      "step": 276
    },
    {
      "epoch": 1.1222784810126583,
      "grad_norm": 0.6996329860296641,
      "learning_rate": 3.81201543660571e-05,
      "loss": 0.5308,
      "step": 277
    },
    {
      "epoch": 1.1263291139240506,
      "grad_norm": 1.1345533307193476,
      "learning_rate": 3.809605768680377e-05,
      "loss": 0.5238,
      "step": 278
    },
    {
      "epoch": 1.130379746835443,
      "grad_norm": 0.7520919989057505,
      "learning_rate": 3.807181526438958e-05,
      "loss": 0.5474,
      "step": 279
    },
    {
      "epoch": 1.1344303797468354,
      "grad_norm": 0.794550011323583,
      "learning_rate": 3.8047427294059697e-05,
      "loss": 0.5533,
      "step": 280
    },
    {
      "epoch": 1.1384810126582279,
      "grad_norm": 0.6673607268797548,
      "learning_rate": 3.802289397223145e-05,
      "loss": 0.4961,
      "step": 281
    },
    {
      "epoch": 1.1425316455696202,
      "grad_norm": 0.8341911995819137,
      "learning_rate": 3.7998215496492854e-05,
      "loss": 0.5828,
      "step": 282
    },
    {
      "epoch": 1.1465822784810127,
      "grad_norm": 0.6132443136973689,
      "learning_rate": 3.797339206560096e-05,
      "loss": 0.5132,
      "step": 283
    },
    {
      "epoch": 1.150632911392405,
      "grad_norm": 0.6356379033728499,
      "learning_rate": 3.794842387948027e-05,
      "loss": 0.5196,
      "step": 284
    },
    {
      "epoch": 1.1546835443037975,
      "grad_norm": 0.604809444480571,
      "learning_rate": 3.7923311139221114e-05,
      "loss": 0.5183,
      "step": 285
    },
    {
      "epoch": 1.1587341772151898,
      "grad_norm": 0.7573407320210432,
      "learning_rate": 3.7898054047078054e-05,
      "loss": 0.5175,
      "step": 286
    },
    {
      "epoch": 1.1627848101265823,
      "grad_norm": 0.7527626341375323,
      "learning_rate": 3.787265280646825e-05,
      "loss": 0.572,
      "step": 287
    },
    {
      "epoch": 1.1668354430379746,
      "grad_norm": 0.607057118521876,
      "learning_rate": 3.7847107621969786e-05,
      "loss": 0.5068,
      "step": 288
    },
    {
      "epoch": 1.1708860759493671,
      "grad_norm": 0.7853849014102718,
      "learning_rate": 3.7821418699320064e-05,
      "loss": 0.5424,
      "step": 289
    },
    {
      "epoch": 1.1749367088607594,
      "grad_norm": 0.7181432734011717,
      "learning_rate": 3.7795586245414145e-05,
      "loss": 0.5583,
      "step": 290
    },
    {
      "epoch": 1.178987341772152,
      "grad_norm": 0.6347936149329371,
      "learning_rate": 3.776961046830306e-05,
      "loss": 0.5218,
      "step": 291
    },
    {
      "epoch": 1.1830379746835442,
      "grad_norm": 0.7940849882469808,
      "learning_rate": 3.774349157719215e-05,
      "loss": 0.5431,
      "step": 292
    },
    {
      "epoch": 1.1870886075949367,
      "grad_norm": 0.6977318096600906,
      "learning_rate": 3.7717229782439365e-05,
      "loss": 0.5461,
      "step": 293
    },
    {
      "epoch": 1.191139240506329,
      "grad_norm": 0.6026651185754219,
      "learning_rate": 3.769082529555359e-05,
      "loss": 0.5162,
      "step": 294
    },
    {
      "epoch": 1.1951898734177215,
      "grad_norm": 0.711696482947943,
      "learning_rate": 3.766427832919294e-05,
      "loss": 0.5564,
      "step": 295
    },
    {
      "epoch": 1.1992405063291138,
      "grad_norm": 0.6026645501695609,
      "learning_rate": 3.7637589097163024e-05,
      "loss": 0.5395,
      "step": 296
    },
    {
      "epoch": 1.2032911392405063,
      "grad_norm": 0.7676258704784457,
      "learning_rate": 3.761075781441526e-05,
      "loss": 0.5349,
      "step": 297
    },
    {
      "epoch": 1.2073417721518986,
      "grad_norm": 0.7325564535677532,
      "learning_rate": 3.75837846970451e-05,
      "loss": 0.5271,
      "step": 298
    },
    {
      "epoch": 1.2113924050632912,
      "grad_norm": 0.7628311419306474,
      "learning_rate": 3.755666996229032e-05,
      "loss": 0.5514,
      "step": 299
    },
    {
      "epoch": 1.2154430379746834,
      "grad_norm": 0.7079645279405029,
      "learning_rate": 3.752941382852927e-05,
      "loss": 0.5216,
      "step": 300
    },
    {
      "epoch": 1.219493670886076,
      "grad_norm": 0.6179398153613539,
      "learning_rate": 3.7502016515279115e-05,
      "loss": 0.5231,
      "step": 301
    },
    {
      "epoch": 1.2235443037974683,
      "grad_norm": 0.782592807988237,
      "learning_rate": 3.7474478243194043e-05,
      "loss": 0.5656,
      "step": 302
    },
    {
      "epoch": 1.2275949367088608,
      "grad_norm": 0.6289218065165803,
      "learning_rate": 3.744679923406351e-05,
      "loss": 0.4968,
      "step": 303
    },
    {
      "epoch": 1.231645569620253,
      "grad_norm": 0.7497492765282757,
      "learning_rate": 3.741897971081043e-05,
      "loss": 0.6238,
      "step": 304
    },
    {
      "epoch": 1.2356962025316456,
      "grad_norm": 0.7461821944857912,
      "learning_rate": 3.739101989748946e-05,
      "loss": 0.4796,
      "step": 305
    },
    {
      "epoch": 1.239746835443038,
      "grad_norm": 0.7818311181532216,
      "learning_rate": 3.7362920019285066e-05,
      "loss": 0.5606,
      "step": 306
    },
    {
      "epoch": 1.2437974683544304,
      "grad_norm": 0.6659066386688293,
      "learning_rate": 3.73346803025098e-05,
      "loss": 0.5097,
      "step": 307
    },
    {
      "epoch": 1.2478481012658227,
      "grad_norm": 0.7425212594202908,
      "learning_rate": 3.730630097460247e-05,
      "loss": 0.5513,
      "step": 308
    },
    {
      "epoch": 1.2518987341772152,
      "grad_norm": 0.6912479803768411,
      "learning_rate": 3.727778226412628e-05,
      "loss": 0.5256,
      "step": 309
    },
    {
      "epoch": 1.2559493670886077,
      "grad_norm": 0.7385710268644965,
      "learning_rate": 3.7249124400767006e-05,
      "loss": 0.557,
      "step": 310
    },
    {
      "epoch": 1.26,
      "grad_norm": 0.6485731667621707,
      "learning_rate": 3.722032761533114e-05,
      "loss": 0.5406,
      "step": 311
    },
    {
      "epoch": 1.2640506329113923,
      "grad_norm": 0.6410179332168048,
      "learning_rate": 3.719139213974403e-05,
      "loss": 0.5024,
      "step": 312
    },
    {
      "epoch": 1.2681012658227848,
      "grad_norm": 0.7772476788126488,
      "learning_rate": 3.7162318207048006e-05,
      "loss": 0.5584,
      "step": 313
    },
    {
      "epoch": 1.2721518987341773,
      "grad_norm": 0.6701116268060914,
      "learning_rate": 3.713310605140055e-05,
      "loss": 0.4975,
      "step": 314
    },
    {
      "epoch": 1.2762025316455696,
      "grad_norm": 0.7154042254396068,
      "learning_rate": 3.710375590807233e-05,
      "loss": 0.5546,
      "step": 315
    },
    {
      "epoch": 1.280253164556962,
      "grad_norm": 0.8096252649782184,
      "learning_rate": 3.7074268013445365e-05,
      "loss": 0.5268,
      "step": 316
    },
    {
      "epoch": 1.2843037974683544,
      "grad_norm": 0.7673995341666917,
      "learning_rate": 3.7044642605011114e-05,
      "loss": 0.5586,
      "step": 317
    },
    {
      "epoch": 1.288354430379747,
      "grad_norm": 0.6680255101434694,
      "learning_rate": 3.701487992136854e-05,
      "loss": 0.5169,
      "step": 318
    },
    {
      "epoch": 1.2924050632911392,
      "grad_norm": 0.519261338824872,
      "learning_rate": 3.69849802022222e-05,
      "loss": 0.4875,
      "step": 319
    },
    {
      "epoch": 1.2964556962025315,
      "grad_norm": 0.7953709135081325,
      "learning_rate": 3.6954943688380334e-05,
      "loss": 0.5947,
      "step": 320
    },
    {
      "epoch": 1.300506329113924,
      "grad_norm": 0.5158012911832226,
      "learning_rate": 3.692477062175289e-05,
      "loss": 0.5273,
      "step": 321
    },
    {
      "epoch": 1.3045569620253166,
      "grad_norm": 0.7278128360926331,
      "learning_rate": 3.689446124534958e-05,
      "loss": 0.5438,
      "step": 322
    },
    {
      "epoch": 1.3086075949367089,
      "grad_norm": 0.7358432376349991,
      "learning_rate": 3.686401580327799e-05,
      "loss": 0.5204,
      "step": 323
    },
    {
      "epoch": 1.3126582278481012,
      "grad_norm": 0.7637597844284224,
      "learning_rate": 3.683343454074149e-05,
      "loss": 0.5242,
      "step": 324
    },
    {
      "epoch": 1.3167088607594937,
      "grad_norm": 0.7478727899034836,
      "learning_rate": 3.6802717704037386e-05,
      "loss": 0.5575,
      "step": 325
    },
    {
      "epoch": 1.3207594936708862,
      "grad_norm": 0.7782192392141688,
      "learning_rate": 3.6771865540554855e-05,
      "loss": 0.5347,
      "step": 326
    },
    {
      "epoch": 1.3248101265822785,
      "grad_norm": 0.7807495727404549,
      "learning_rate": 3.674087829877297e-05,
      "loss": 0.5179,
      "step": 327
    },
    {
      "epoch": 1.3288607594936708,
      "grad_norm": 0.7775124959092382,
      "learning_rate": 3.6709756228258735e-05,
      "loss": 0.5464,
      "step": 328
    },
    {
      "epoch": 1.3329113924050633,
      "grad_norm": 0.7901677098335301,
      "learning_rate": 3.667849957966501e-05,
      "loss": 0.5726,
      "step": 329
    },
    {
      "epoch": 1.3369620253164558,
      "grad_norm": 0.7475655876222138,
      "learning_rate": 3.6647108604728546e-05,
      "loss": 0.5024,
      "step": 330
    },
    {
      "epoch": 1.341012658227848,
      "grad_norm": 0.8149120442684487,
      "learning_rate": 3.661558355626795e-05,
      "loss": 0.5332,
      "step": 331
    },
    {
      "epoch": 1.3450632911392404,
      "grad_norm": 0.8939349270223009,
      "learning_rate": 3.658392468818163e-05,
      "loss": 0.5139,
      "step": 332
    },
    {
      "epoch": 1.349113924050633,
      "grad_norm": 1.048261110975364,
      "learning_rate": 3.655213225544574e-05,
      "loss": 0.5756,
      "step": 333
    },
    {
      "epoch": 1.3531645569620254,
      "grad_norm": 0.8905279689523212,
      "learning_rate": 3.652020651411218e-05,
      "loss": 0.4995,
      "step": 334
    },
    {
      "epoch": 1.3572151898734177,
      "grad_norm": 1.0943134766500848,
      "learning_rate": 3.6488147721306474e-05,
      "loss": 0.5729,
      "step": 335
    },
    {
      "epoch": 1.36126582278481,
      "grad_norm": 0.896179214605804,
      "learning_rate": 3.645595613522574e-05,
      "loss": 0.5442,
      "step": 336
    },
    {
      "epoch": 1.3653164556962025,
      "grad_norm": 0.8629632136201635,
      "learning_rate": 3.642363201513657e-05,
      "loss": 0.5397,
      "step": 337
    },
    {
      "epoch": 1.369367088607595,
      "grad_norm": 0.7128834196185092,
      "learning_rate": 3.6391175621373006e-05,
      "loss": 0.5473,
      "step": 338
    },
    {
      "epoch": 1.3734177215189873,
      "grad_norm": 0.7993657201833773,
      "learning_rate": 3.6358587215334355e-05,
      "loss": 0.5227,
      "step": 339
    },
    {
      "epoch": 1.3774683544303796,
      "grad_norm": 0.7429877571683161,
      "learning_rate": 3.632586705948318e-05,
      "loss": 0.526,
      "step": 340
    },
    {
      "epoch": 1.3815189873417721,
      "grad_norm": 0.8158397219728912,
      "learning_rate": 3.629301541734311e-05,
      "loss": 0.5269,
      "step": 341
    },
    {
      "epoch": 1.3855696202531647,
      "grad_norm": 0.763925680984283,
      "learning_rate": 3.626003255349676e-05,
      "loss": 0.5535,
      "step": 342
    },
    {
      "epoch": 1.389620253164557,
      "grad_norm": 0.8164728705747548,
      "learning_rate": 3.622691873358357e-05,
      "loss": 0.5074,
      "step": 343
    },
    {
      "epoch": 1.3936708860759492,
      "grad_norm": 0.5982531873978788,
      "learning_rate": 3.61936742242977e-05,
      "loss": 0.5448,
      "step": 344
    },
    {
      "epoch": 1.3977215189873418,
      "grad_norm": 0.8406570050186799,
      "learning_rate": 3.6160299293385864e-05,
      "loss": 0.5309,
      "step": 345
    },
    {
      "epoch": 1.4017721518987343,
      "grad_norm": 0.7188483269768406,
      "learning_rate": 3.612679420964516e-05,
      "loss": 0.5466,
      "step": 346
    },
    {
      "epoch": 1.4058227848101266,
      "grad_norm": 0.7355198570177172,
      "learning_rate": 3.609315924292092e-05,
      "loss": 0.5247,
      "step": 347
    },
    {
      "epoch": 1.4098734177215189,
      "grad_norm": 0.9070012544967203,
      "learning_rate": 3.6059394664104554e-05,
      "loss": 0.5518,
      "step": 348
    },
    {
      "epoch": 1.4139240506329114,
      "grad_norm": 0.8053420645672132,
      "learning_rate": 3.602550074513133e-05,
      "loss": 0.5114,
      "step": 349
    },
    {
      "epoch": 1.417974683544304,
      "grad_norm": 0.7172107469358631,
      "learning_rate": 3.599147775897822e-05,
      "loss": 0.5665,
      "step": 350
    },
    {
      "epoch": 1.4220253164556962,
      "grad_norm": 0.8668334688653698,
      "learning_rate": 3.595732597966167e-05,
      "loss": 0.5428,
      "step": 351
    },
    {
      "epoch": 1.4260759493670885,
      "grad_norm": 0.7062853417722891,
      "learning_rate": 3.592304568223542e-05,
      "loss": 0.5096,
      "step": 352
    },
    {
      "epoch": 1.430126582278481,
      "grad_norm": 0.7171562259025508,
      "learning_rate": 3.588863714278826e-05,
      "loss": 0.5339,
      "step": 353
    },
    {
      "epoch": 1.4341772151898735,
      "grad_norm": 0.7143425070615537,
      "learning_rate": 3.585410063844186e-05,
      "loss": 0.5236,
      "step": 354
    },
    {
      "epoch": 1.4382278481012658,
      "grad_norm": 0.8734677621493264,
      "learning_rate": 3.581943644734846e-05,
      "loss": 0.5419,
      "step": 355
    },
    {
      "epoch": 1.442278481012658,
      "grad_norm": 0.6654257823819231,
      "learning_rate": 3.578464484868869e-05,
      "loss": 0.5218,
      "step": 356
    },
    {
      "epoch": 1.4463291139240506,
      "grad_norm": 0.8771206413013289,
      "learning_rate": 3.5749726122669316e-05,
      "loss": 0.5663,
      "step": 357
    },
    {
      "epoch": 1.4503797468354431,
      "grad_norm": 0.8280025228340774,
      "learning_rate": 3.5714680550520943e-05,
      "loss": 0.4775,
      "step": 358
    },
    {
      "epoch": 1.4544303797468354,
      "grad_norm": 0.8613088292236915,
      "learning_rate": 3.5679508414495794e-05,
      "loss": 0.5176,
      "step": 359
    },
    {
      "epoch": 1.4584810126582277,
      "grad_norm": 0.8869421010766709,
      "learning_rate": 3.564420999786543e-05,
      "loss": 0.5635,
      "step": 360
    },
    {
      "epoch": 1.4625316455696202,
      "grad_norm": 0.8078946982822428,
      "learning_rate": 3.560878558491842e-05,
      "loss": 0.5411,
      "step": 361
    },
    {
      "epoch": 1.4665822784810127,
      "grad_norm": 0.9527052198102269,
      "learning_rate": 3.5573235460958145e-05,
      "loss": 0.5279,
      "step": 362
    },
    {
      "epoch": 1.470632911392405,
      "grad_norm": 0.7379611889308306,
      "learning_rate": 3.553755991230039e-05,
      "loss": 0.4948,
      "step": 363
    },
    {
      "epoch": 1.4746835443037973,
      "grad_norm": 1.037536832374824,
      "learning_rate": 3.5501759226271144e-05,
      "loss": 0.5615,
      "step": 364
    },
    {
      "epoch": 1.4787341772151898,
      "grad_norm": 1.0693374991753821,
      "learning_rate": 3.546583369120419e-05,
      "loss": 0.5444,
      "step": 365
    },
    {
      "epoch": 1.4827848101265824,
      "grad_norm": 0.7138031816922276,
      "learning_rate": 3.5429783596438864e-05,
      "loss": 0.5548,
      "step": 366
    },
    {
      "epoch": 1.4868354430379747,
      "grad_norm": 1.3170934039355493,
      "learning_rate": 3.539360923231766e-05,
      "loss": 0.5065,
      "step": 367
    },
    {
      "epoch": 1.490886075949367,
      "grad_norm": 0.8200775846675421,
      "learning_rate": 3.535731089018394e-05,
      "loss": 0.5513,
      "step": 368
    },
    {
      "epoch": 1.4949367088607595,
      "grad_norm": 1.6124921248441304,
      "learning_rate": 3.532088886237956e-05,
      "loss": 0.5321,
      "step": 369
    },
    {
      "epoch": 1.498987341772152,
      "grad_norm": 1.1346409082267763,
      "learning_rate": 3.528434344224253e-05,
      "loss": 0.4799,
      "step": 370
    },
    {
      "epoch": 1.5030379746835443,
|
"grad_norm": 1.4339834045853836, |
|
"learning_rate": 3.524767492410464e-05, |
|
"loss": 0.5424, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 1.5070886075949366, |
|
"grad_norm": 1.5250631610232088, |
|
"learning_rate": 3.521088360328908e-05, |
|
"loss": 0.6127, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 1.511139240506329, |
|
"grad_norm": 1.082972026084195, |
|
"learning_rate": 3.517396977610811e-05, |
|
"loss": 0.5136, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 1.5151898734177216, |
|
"grad_norm": 1.2295060702908684, |
|
"learning_rate": 3.5136933739860595e-05, |
|
"loss": 0.5643, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 1.519240506329114, |
|
"grad_norm": 1.1285295609201969, |
|
"learning_rate": 3.509977579282971e-05, |
|
"loss": 0.5344, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 1.5232911392405062, |
|
"grad_norm": 0.9033979036969477, |
|
"learning_rate": 3.5062496234280424e-05, |
|
"loss": 0.5179, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 1.5273417721518987, |
|
"grad_norm": 1.2815545144400042, |
|
"learning_rate": 3.502509536445719e-05, |
|
"loss": 0.5404, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 1.5313924050632912, |
|
"grad_norm": 0.8754576548827399, |
|
"learning_rate": 3.498757348458147e-05, |
|
"loss": 0.5252, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 1.5354430379746835, |
|
"grad_norm": 1.1824708259500432, |
|
"learning_rate": 3.4949930896849324e-05, |
|
"loss": 0.5085, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 1.5394936708860758, |
|
"grad_norm": 0.8337980191058465, |
|
"learning_rate": 3.491216790442899e-05, |
|
"loss": 0.5192, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.5435443037974683, |
|
"grad_norm": 1.255239915007499, |
|
"learning_rate": 3.487428481145839e-05, |
|
"loss": 0.5515, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 1.5475949367088608, |
|
"grad_norm": 0.802745331443439, |
|
"learning_rate": 3.483628192304278e-05, |
|
"loss": 0.5048, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 1.5516455696202531, |
|
"grad_norm": 1.3700122073883318, |
|
"learning_rate": 3.479815954525219e-05, |
|
"loss": 0.5635, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 1.5556962025316454, |
|
"grad_norm": 0.8267956444699474, |
|
"learning_rate": 3.475991798511899e-05, |
|
"loss": 0.5003, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 1.559746835443038, |
|
"grad_norm": 1.586334408935424, |
|
"learning_rate": 3.4721557550635464e-05, |
|
"loss": 0.5723, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 1.5637974683544305, |
|
"grad_norm": 1.0006891398748612, |
|
"learning_rate": 3.468307855075128e-05, |
|
"loss": 0.5362, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 1.5678481012658227, |
|
"grad_norm": 1.3721405586236546, |
|
"learning_rate": 3.4644481295371005e-05, |
|
"loss": 0.4872, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 1.571898734177215, |
|
"grad_norm": 1.6702956322971638, |
|
"learning_rate": 3.460576609535163e-05, |
|
"loss": 0.5897, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 1.5759493670886076, |
|
"grad_norm": 0.8034602163346699, |
|
"learning_rate": 3.456693326250006e-05, |
|
"loss": 0.4987, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 1.4948796979048882, |
|
"learning_rate": 3.452798310957058e-05, |
|
"loss": 0.5368, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.5840506329113924, |
|
"grad_norm": 0.6712868309282561, |
|
"learning_rate": 3.4488915950262386e-05, |
|
"loss": 0.5213, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 1.5881012658227847, |
|
"grad_norm": 1.4902244299098586, |
|
"learning_rate": 3.4449732099216985e-05, |
|
"loss": 0.5492, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 1.5921518987341772, |
|
"grad_norm": 1.1788229748503278, |
|
"learning_rate": 3.441043187201574e-05, |
|
"loss": 0.5563, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 1.5962025316455697, |
|
"grad_norm": 1.0962935069258886, |
|
"learning_rate": 3.437101558517728e-05, |
|
"loss": 0.4841, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 1.600253164556962, |
|
"grad_norm": 1.2827731023765487, |
|
"learning_rate": 3.433148355615496e-05, |
|
"loss": 0.5517, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 1.6043037974683543, |
|
"grad_norm": 0.8593718505449981, |
|
"learning_rate": 3.4291836103334294e-05, |
|
"loss": 0.5328, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 1.6083544303797468, |
|
"grad_norm": 0.9861967973750696, |
|
"learning_rate": 3.425207354603043e-05, |
|
"loss": 0.5378, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 1.6124050632911393, |
|
"grad_norm": 0.7329773454588082, |
|
"learning_rate": 3.421219620448553e-05, |
|
"loss": 0.473, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 1.6164556962025316, |
|
"grad_norm": 1.0463843620838313, |
|
"learning_rate": 3.417220439986623e-05, |
|
"loss": 0.5424, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 1.620506329113924, |
|
"grad_norm": 0.8542830938780757, |
|
"learning_rate": 3.4132098454261024e-05, |
|
"loss": 0.5633, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.6245569620253164, |
|
"grad_norm": 0.8173024575268236, |
|
"learning_rate": 3.4091878690677676e-05, |
|
"loss": 0.4836, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 1.628607594936709, |
|
"grad_norm": 0.6466132951606368, |
|
"learning_rate": 3.405154543304065e-05, |
|
"loss": 0.5346, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 1.6326582278481012, |
|
"grad_norm": 0.8444118498149114, |
|
"learning_rate": 3.401109900618843e-05, |
|
"loss": 0.5625, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 1.6367088607594935, |
|
"grad_norm": 0.6234632699296221, |
|
"learning_rate": 3.3970539735870996e-05, |
|
"loss": 0.5366, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 1.640759493670886, |
|
"grad_norm": 0.7859143697415626, |
|
"learning_rate": 3.392986794874714e-05, |
|
"loss": 0.5421, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.6448101265822785, |
|
"grad_norm": 0.6451752432758884, |
|
"learning_rate": 3.388908397238184e-05, |
|
"loss": 0.5048, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 1.6488607594936708, |
|
"grad_norm": 0.8490154068029508, |
|
"learning_rate": 3.384818813524362e-05, |
|
"loss": 0.545, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 1.6529113924050631, |
|
"grad_norm": 0.6651407785115806, |
|
"learning_rate": 3.380718076670195e-05, |
|
"loss": 0.5303, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 1.6569620253164556, |
|
"grad_norm": 0.7876377459377151, |
|
"learning_rate": 3.376606219702454e-05, |
|
"loss": 0.5362, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 1.6610126582278482, |
|
"grad_norm": 0.5386576944935724, |
|
"learning_rate": 3.372483275737468e-05, |
|
"loss": 0.5227, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.6650632911392405, |
|
"grad_norm": 0.8408366515495476, |
|
"learning_rate": 3.368349277980861e-05, |
|
"loss": 0.5746, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 1.6691139240506327, |
|
"grad_norm": 0.6528637092171089, |
|
"learning_rate": 3.3642042597272844e-05, |
|
"loss": 0.5078, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 1.6731645569620253, |
|
"grad_norm": 0.7498749417735426, |
|
"learning_rate": 3.360048254360144e-05, |
|
"loss": 0.5418, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 1.6772151898734178, |
|
"grad_norm": 0.6525914263058754, |
|
"learning_rate": 3.355881295351336e-05, |
|
"loss": 0.5412, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 1.68126582278481, |
|
"grad_norm": 0.6446934840865008, |
|
"learning_rate": 3.351703416260975e-05, |
|
"loss": 0.5221, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 1.6853164556962026, |
|
"grad_norm": 0.6671260437359778, |
|
"learning_rate": 3.347514650737126e-05, |
|
"loss": 0.4896, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 1.6893670886075949, |
|
"grad_norm": 0.657120516319114, |
|
"learning_rate": 3.3433150325155295e-05, |
|
"loss": 0.578, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 1.6934177215189874, |
|
"grad_norm": 0.6191300011832233, |
|
"learning_rate": 3.339104595419334e-05, |
|
"loss": 0.5039, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 1.69746835443038, |
|
"grad_norm": 0.696398367403582, |
|
"learning_rate": 3.3348833733588204e-05, |
|
"loss": 0.5314, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 1.7015189873417722, |
|
"grad_norm": 0.5702118591989692, |
|
"learning_rate": 3.3306514003311305e-05, |
|
"loss": 0.5166, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.7055696202531645, |
|
"grad_norm": 0.6919246993681929, |
|
"learning_rate": 3.326408710419996e-05, |
|
"loss": 0.5615, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 1.709620253164557, |
|
"grad_norm": 0.6055434786653293, |
|
"learning_rate": 3.322155337795454e-05, |
|
"loss": 0.5376, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 1.7136708860759495, |
|
"grad_norm": 0.7329285640587678, |
|
"learning_rate": 3.317891316713587e-05, |
|
"loss": 0.5845, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 1.7177215189873418, |
|
"grad_norm": 0.6827633423355121, |
|
"learning_rate": 3.313616681516231e-05, |
|
"loss": 0.5085, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 1.7217721518987341, |
|
"grad_norm": 0.5533513988811852, |
|
"learning_rate": 3.309331466630713e-05, |
|
"loss": 0.5317, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 1.7258227848101266, |
|
"grad_norm": 0.5557611331384712, |
|
"learning_rate": 3.305035706569563e-05, |
|
"loss": 0.4868, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 1.7298734177215191, |
|
"grad_norm": 0.6515161778773387, |
|
"learning_rate": 3.3007294359302433e-05, |
|
"loss": 0.5291, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 1.7339240506329114, |
|
"grad_norm": 0.548235080196943, |
|
"learning_rate": 3.296412689394864e-05, |
|
"loss": 0.5124, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 1.7379746835443037, |
|
"grad_norm": 0.6422666558080213, |
|
"learning_rate": 3.292085501729909e-05, |
|
"loss": 0.5252, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 1.7420253164556962, |
|
"grad_norm": 0.6463530863952022, |
|
"learning_rate": 3.2877479077859534e-05, |
|
"loss": 0.5255, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.7460759493670888, |
|
"grad_norm": 0.6129207275947827, |
|
"learning_rate": 3.283399942497381e-05, |
|
"loss": 0.5533, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 1.750126582278481, |
|
"grad_norm": 0.6041943265661842, |
|
"learning_rate": 3.279041640882108e-05, |
|
"loss": 0.5223, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 1.7541772151898734, |
|
"grad_norm": 0.6293625219699406, |
|
"learning_rate": 3.2746730380412964e-05, |
|
"loss": 0.4969, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 1.7582278481012659, |
|
"grad_norm": 0.6051251727962021, |
|
"learning_rate": 3.2702941691590726e-05, |
|
"loss": 0.5915, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 1.7622784810126584, |
|
"grad_norm": 0.5803773565732196, |
|
"learning_rate": 3.265905069502244e-05, |
|
"loss": 0.4675, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 1.7663291139240507, |
|
"grad_norm": 0.6611714090537727, |
|
"learning_rate": 3.261505774420016e-05, |
|
"loss": 0.538, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 1.770379746835443, |
|
"grad_norm": 0.9164302376294405, |
|
"learning_rate": 3.257096319343707e-05, |
|
"loss": 0.5442, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 1.7744303797468355, |
|
"grad_norm": 0.660529992730085, |
|
"learning_rate": 3.2526767397864614e-05, |
|
"loss": 0.5176, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 1.778481012658228, |
|
"grad_norm": 0.7219912638653426, |
|
"learning_rate": 3.248247071342966e-05, |
|
"loss": 0.5102, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 1.7825316455696203, |
|
"grad_norm": 0.5456527223371624, |
|
"learning_rate": 3.243807349689161e-05, |
|
"loss": 0.5216, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.7865822784810126, |
|
"grad_norm": 0.7188805441524913, |
|
"learning_rate": 3.2393576105819544e-05, |
|
"loss": 0.5151, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 1.790632911392405, |
|
"grad_norm": 0.7881440981364776, |
|
"learning_rate": 3.2348978898589333e-05, |
|
"loss": 0.5771, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 1.7946835443037976, |
|
"grad_norm": 0.5857343140429639, |
|
"learning_rate": 3.230428223438075e-05, |
|
"loss": 0.5028, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 1.79873417721519, |
|
"grad_norm": 0.7189572571672036, |
|
"learning_rate": 3.225948647317459e-05, |
|
"loss": 0.5291, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 1.8027848101265822, |
|
"grad_norm": 0.6731676938115837, |
|
"learning_rate": 3.2214591975749745e-05, |
|
"loss": 0.5198, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 1.8068354430379747, |
|
"grad_norm": 0.6505047149857615, |
|
"learning_rate": 3.216959910368034e-05, |
|
"loss": 0.5491, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 1.8108860759493672, |
|
"grad_norm": 0.6961239245600651, |
|
"learning_rate": 3.212450821933277e-05, |
|
"loss": 0.498, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 1.8149367088607595, |
|
"grad_norm": 0.6006940619338451, |
|
"learning_rate": 3.207931968586281e-05, |
|
"loss": 0.537, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 1.8189873417721518, |
|
"grad_norm": 0.6072928759056202, |
|
"learning_rate": 3.203403386721272e-05, |
|
"loss": 0.5134, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 1.8230379746835443, |
|
"grad_norm": 0.7509347171910691, |
|
"learning_rate": 3.1988651128108245e-05, |
|
"loss": 0.5594, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.8270886075949369, |
|
"grad_norm": 0.5462004767276315, |
|
"learning_rate": 3.194317183405573e-05, |
|
"loss": 0.5103, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 1.8311392405063291, |
|
"grad_norm": 0.7401160079373621, |
|
"learning_rate": 3.189759635133914e-05, |
|
"loss": 0.5362, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 1.8351898734177214, |
|
"grad_norm": 0.8324607521737274, |
|
"learning_rate": 3.185192504701718e-05, |
|
"loss": 0.5393, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 1.839240506329114, |
|
"grad_norm": 0.551488193585642, |
|
"learning_rate": 3.1806158288920234e-05, |
|
"loss": 0.5197, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 1.8432911392405065, |
|
"grad_norm": 0.7830603835369453, |
|
"learning_rate": 3.1760296445647477e-05, |
|
"loss": 0.4942, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 1.8473417721518988, |
|
"grad_norm": 0.5783266141423138, |
|
"learning_rate": 3.1714339886563896e-05, |
|
"loss": 0.49, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 1.851392405063291, |
|
"grad_norm": 0.6400544027725846, |
|
"learning_rate": 3.166828898179731e-05, |
|
"loss": 0.5464, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 1.8554430379746836, |
|
"grad_norm": 0.8369562212066275, |
|
"learning_rate": 3.162214410223536e-05, |
|
"loss": 0.5269, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 1.859493670886076, |
|
"grad_norm": 0.5856264576567846, |
|
"learning_rate": 3.157590561952257e-05, |
|
"loss": 0.5317, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 1.8635443037974684, |
|
"grad_norm": 0.6945156690654839, |
|
"learning_rate": 3.152957390605732e-05, |
|
"loss": 0.4919, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.8675949367088607, |
|
"grad_norm": 0.7468991026233852, |
|
"learning_rate": 3.148314933498886e-05, |
|
"loss": 0.5585, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 1.8716455696202532, |
|
"grad_norm": 0.5931798966489197, |
|
"learning_rate": 3.143663228021431e-05, |
|
"loss": 0.5648, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 1.8756962025316457, |
|
"grad_norm": 0.6380303717176617, |
|
"learning_rate": 3.1390023116375624e-05, |
|
"loss": 0.5056, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 1.879746835443038, |
|
"grad_norm": 0.6404848675497172, |
|
"learning_rate": 3.134332221885661e-05, |
|
"loss": 0.5193, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 1.8837974683544303, |
|
"grad_norm": 0.6095747686339011, |
|
"learning_rate": 3.129652996377987e-05, |
|
"loss": 0.5351, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 1.8878481012658228, |
|
"grad_norm": 0.7113563185806772, |
|
"learning_rate": 3.12496467280038e-05, |
|
"loss": 0.5103, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 1.8918987341772153, |
|
"grad_norm": 0.6586780019617671, |
|
"learning_rate": 3.120267288911952e-05, |
|
"loss": 0.5616, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 1.8959493670886076, |
|
"grad_norm": 0.68424999453921, |
|
"learning_rate": 3.11556088254479e-05, |
|
"loss": 0.5621, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 0.5866570130780453, |
|
"learning_rate": 3.11084549160364e-05, |
|
"loss": 0.5331, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 1.9040506329113924, |
|
"grad_norm": 0.7049678346378531, |
|
"learning_rate": 3.106121154065615e-05, |
|
"loss": 0.5551, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.908101265822785, |
|
"grad_norm": 0.5573047248480321, |
|
"learning_rate": 3.1013879079798805e-05, |
|
"loss": 0.5159, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 1.9121518987341772, |
|
"grad_norm": 0.6152448790186686, |
|
"learning_rate": 3.096645791467348e-05, |
|
"loss": 0.4909, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 1.9162025316455695, |
|
"grad_norm": 0.5725696450057538, |
|
"learning_rate": 3.091894842720373e-05, |
|
"loss": 0.5401, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 1.920253164556962, |
|
"grad_norm": 0.9174494751608109, |
|
"learning_rate": 3.0871351000024425e-05, |
|
"loss": 0.5229, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 1.9243037974683546, |
|
"grad_norm": 0.6820056903748684, |
|
"learning_rate": 3.0823666016478716e-05, |
|
"loss": 0.5222, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 1.9283544303797469, |
|
"grad_norm": 0.6531775914330018, |
|
"learning_rate": 3.0775893860614896e-05, |
|
"loss": 0.5548, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 1.9324050632911391, |
|
"grad_norm": 0.7219375197681888, |
|
"learning_rate": 3.0728034917183336e-05, |
|
"loss": 0.5349, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 1.9364556962025317, |
|
"grad_norm": 0.45830651996684696, |
|
"learning_rate": 3.06800895716334e-05, |
|
"loss": 0.5079, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 1.9405063291139242, |
|
"grad_norm": 0.766580151973448, |
|
"learning_rate": 3.063205821011029e-05, |
|
"loss": 0.5313, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 1.9445569620253165, |
|
"grad_norm": 0.6849201662198069, |
|
"learning_rate": 3.0583941219452016e-05, |
|
"loss": 0.5208, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.9486075949367088, |
|
"grad_norm": 0.5058876423464879, |
|
"learning_rate": 3.053573898718618e-05, |
|
"loss": 0.5068, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 1.9526582278481013, |
|
"grad_norm": 0.6866379614602172, |
|
"learning_rate": 3.0487451901526956e-05, |
|
"loss": 0.5408, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 1.9567088607594938, |
|
"grad_norm": 0.5374063687876747, |
|
"learning_rate": 3.0439080351371875e-05, |
|
"loss": 0.5503, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 1.960759493670886, |
|
"grad_norm": 0.6688932201293888, |
|
"learning_rate": 3.0390624726298764e-05, |
|
"loss": 0.4961, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 1.9648101265822784, |
|
"grad_norm": 0.6820234915996668, |
|
"learning_rate": 3.034208541656255e-05, |
|
"loss": 0.5225, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 1.968860759493671, |
|
"grad_norm": 0.5260888237593319, |
|
"learning_rate": 3.029346281309218e-05, |
|
"loss": 0.5742, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 1.9729113924050634, |
|
"grad_norm": 0.637750016180435, |
|
"learning_rate": 3.0244757307487415e-05, |
|
"loss": 0.5204, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 1.9769620253164557, |
|
"grad_norm": 0.6059491605210939, |
|
"learning_rate": 3.019596929201569e-05, |
|
"loss": 0.5375, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 1.981012658227848, |
|
"grad_norm": 0.5073902709569702, |
|
"learning_rate": 3.0147099159608985e-05, |
|
"loss": 0.5088, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 1.9850632911392405, |
|
"grad_norm": 0.5974956505847179, |
|
"learning_rate": 3.0098147303860616e-05, |
|
"loss": 0.5077, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.989113924050633, |
|
"grad_norm": 0.6135404872694891, |
|
"learning_rate": 3.0049114119022117e-05, |
|
"loss": 0.5752, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 1.9931645569620253, |
|
"grad_norm": 0.5957821478548706, |
|
"learning_rate": 3.0000000000000004e-05, |
|
"loss": 0.5259, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 1.9972151898734176, |
|
"grad_norm": 0.5588008264838369, |
|
"learning_rate": 2.995080534235264e-05, |
|
"loss": 0.5772, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 2.0015189873417722, |
|
"grad_norm": 0.7142109061370712, |
|
"learning_rate": 2.9901530542287044e-05, |
|
"loss": 0.547, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 2.0055696202531648, |
|
"grad_norm": 0.5971778243167354, |
|
"learning_rate": 2.9852175996655676e-05, |
|
"loss": 0.4335, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 2.009620253164557, |
|
"grad_norm": 1.0858711741321507, |
|
"learning_rate": 2.980274210295326e-05, |
|
"loss": 0.4607, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 2.0136708860759494, |
|
"grad_norm": 0.89346863876639, |
|
"learning_rate": 2.9753229259313578e-05, |
|
"loss": 0.4635, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 2.017721518987342, |
|
"grad_norm": 0.7474366873719537, |
|
"learning_rate": 2.9703637864506274e-05, |
|
"loss": 0.3921, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 2.0217721518987344, |
|
"grad_norm": 0.8142073960057717, |
|
"learning_rate": 2.965396831793362e-05, |
|
"loss": 0.441, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 2.0258227848101265, |
|
"grad_norm": 0.7103985867494478, |
|
"learning_rate": 2.9604221019627316e-05, |
|
"loss": 0.4124, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 2.029873417721519, |
|
"grad_norm": 0.6985428659913748, |
|
"learning_rate": 2.955439637024526e-05, |
|
"loss": 0.429, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 2.0339240506329115, |
|
"grad_norm": 0.7242246572253824, |
|
"learning_rate": 2.9504494771068334e-05, |
|
"loss": 0.4562, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 2.037974683544304, |
|
"grad_norm": 0.6757313556469043, |
|
"learning_rate": 2.9454516623997156e-05, |
|
"loss": 0.4225, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 2.042025316455696, |
|
"grad_norm": 0.7235850959150214, |
|
"learning_rate": 2.9404462331548847e-05, |
|
"loss": 0.4187, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 2.0460759493670886, |
|
"grad_norm": 0.6765018777019174, |
|
"learning_rate": 2.93543322968538e-05, |
|
"loss": 0.4564, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 2.050126582278481, |
|
"grad_norm": 0.8803289454097694, |
|
"learning_rate": 2.9304126923652428e-05, |
|
"loss": 0.4109, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 2.0541772151898736, |
|
"grad_norm": 0.6445947719593863, |
|
"learning_rate": 2.9253846616291896e-05, |
|
"loss": 0.4295, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 2.0582278481012657, |
|
"grad_norm": 0.7926558531439836, |
|
"learning_rate": 2.9203491779722896e-05, |
|
"loss": 0.4049, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 2.062278481012658, |
|
"grad_norm": 0.6923578634644288, |
|
"learning_rate": 2.9153062819496357e-05, |
|
"loss": 0.398, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 2.0663291139240507, |
|
"grad_norm": 0.7916672752848571, |
|
"learning_rate": 2.9102560141760178e-05, |
|
"loss": 0.4776, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 2.0703797468354432, |
|
"grad_norm": 0.762249511147647, |
|
"learning_rate": 2.9051984153256004e-05, |
|
"loss": 0.4267, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 2.0744303797468353, |
|
"grad_norm": 0.6734172160623457, |
|
"learning_rate": 2.900133526131588e-05, |
|
"loss": 0.4031, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 2.078481012658228, |
|
"grad_norm": 0.666569069754466, |
|
"learning_rate": 2.8950613873859025e-05, |
|
"loss": 0.426, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 2.0825316455696203, |
|
"grad_norm": 0.7318548727127212, |
|
"learning_rate": 2.8899820399388515e-05, |
|
"loss": 0.4111, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 2.086582278481013, |
|
"grad_norm": 0.6531571069439703, |
|
"learning_rate": 2.8848955246988012e-05, |
|
"loss": 0.4265, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 2.090632911392405, |
|
"grad_norm": 0.7370893744193006, |
|
"learning_rate": 2.879801882631847e-05, |
|
"loss": 0.4272, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 2.0946835443037974, |
|
"grad_norm": 0.5161302678630806, |
|
"learning_rate": 2.8747011547614808e-05, |
|
"loss": 0.3866, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 2.09873417721519, |
|
"grad_norm": 0.6188172550670268, |
|
"learning_rate": 2.8695933821682635e-05, |
|
"loss": 0.4439, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 2.1027848101265825, |
|
"grad_norm": 0.5277330823290988, |
|
"learning_rate": 2.864478605989494e-05, |
|
"loss": 0.4479, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 2.1068354430379745, |
|
"grad_norm": 0.526826164303142, |
|
"learning_rate": 2.8593568674188765e-05, |
|
"loss": 0.4297, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 2.110886075949367, |
|
"grad_norm": 0.5659587153434418, |
|
"learning_rate": 2.8542282077061892e-05, |
|
"loss": 0.4094, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 2.1149367088607596, |
|
"grad_norm": 0.4776812632179298, |
|
"learning_rate": 2.8490926681569523e-05, |
|
"loss": 0.4089, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 2.118987341772152, |
|
"grad_norm": 0.6206550439700692, |
|
"learning_rate": 2.8439502901320956e-05, |
|
"loss": 0.4559, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 2.123037974683544, |
|
"grad_norm": 0.4823479689667362, |
|
"learning_rate": 2.8388011150476237e-05, |
|
"loss": 0.417, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 2.1270886075949367, |
|
"grad_norm": 0.5714407009562481, |
|
"learning_rate": 2.8336451843742866e-05, |
|
"loss": 0.4472, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 2.131139240506329, |
|
"grad_norm": 0.571463495193429, |
|
"learning_rate": 2.8284825396372387e-05, |
|
"loss": 0.3874, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 2.1351898734177217, |
|
"grad_norm": 0.6023320897030572, |
|
"learning_rate": 2.8233132224157132e-05, |
|
"loss": 0.4463, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 2.1392405063291138, |
|
"grad_norm": 0.49290501699125205, |
|
"learning_rate": 2.8181372743426805e-05, |
|
"loss": 0.4256, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 2.1432911392405063, |
|
"grad_norm": 0.5852095470665444, |
|
"learning_rate": 2.8129547371045128e-05, |
|
"loss": 0.4156, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 2.147341772151899, |
|
"grad_norm": 0.506198241345646, |
|
"learning_rate": 2.8077656524406534e-05, |
|
"loss": 0.4307, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 2.1513924050632913, |
|
"grad_norm": 0.5068506337891088, |
|
"learning_rate": 2.802570062143278e-05, |
|
"loss": 0.441, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 2.1554430379746834, |
|
"grad_norm": 0.620293821013748, |
|
"learning_rate": 2.7973680080569555e-05, |
|
"loss": 0.4195, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 2.159493670886076, |
|
"grad_norm": 0.48372368335928134, |
|
"learning_rate": 2.792159532078314e-05, |
|
"loss": 0.4291, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 2.1635443037974684, |
|
"grad_norm": 0.5971576552761663, |
|
"learning_rate": 2.7869446761557033e-05, |
|
"loss": 0.4434, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 2.167594936708861, |
|
"grad_norm": 0.4124363523330016, |
|
"learning_rate": 2.781723482288857e-05, |
|
"loss": 0.3556, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 2.171645569620253, |
|
"grad_norm": 0.7354336830833796, |
|
"learning_rate": 2.7764959925285517e-05, |
|
"loss": 0.451, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 2.1756962025316455, |
|
"grad_norm": 0.4710450335947975, |
|
"learning_rate": 2.771262248976272e-05, |
|
"loss": 0.4245, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 2.179746835443038, |
|
"grad_norm": 0.6120176303324681, |
|
"learning_rate": 2.7660222937838677e-05, |
|
"loss": 0.4264, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 2.1837974683544306, |
|
"grad_norm": 0.5163584324799755, |
|
"learning_rate": 2.7607761691532186e-05, |
|
"loss": 0.4201, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 2.1878481012658226, |
|
"grad_norm": 0.4609868715747531, |
|
"learning_rate": 2.7555239173358916e-05, |
|
"loss": 0.419, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 2.191898734177215, |
|
"grad_norm": 0.4648557032534407, |
|
"learning_rate": 2.7502655806328e-05, |
|
"loss": 0.4142, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 2.1959493670886077, |
|
"grad_norm": 0.5026750025968879, |
|
"learning_rate": 2.7450012013938648e-05, |
|
"loss": 0.425, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 0.5571685900250907, |
|
"learning_rate": 2.739730822017673e-05, |
|
"loss": 0.452, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 2.2040506329113922, |
|
"grad_norm": 0.48602034022959506, |
|
"learning_rate": 2.7344544849511355e-05, |
|
"loss": 0.4307, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 2.2081012658227848, |
|
"grad_norm": 0.6387928780281421, |
|
"learning_rate": 2.7291722326891456e-05, |
|
"loss": 0.4448, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 2.2121518987341773, |
|
"grad_norm": 0.4237518111640032, |
|
"learning_rate": 2.723884107774236e-05, |
|
"loss": 0.386, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 2.21620253164557, |
|
"grad_norm": 0.5736309940471025, |
|
"learning_rate": 2.718590152796239e-05, |
|
"loss": 0.4411, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 2.220253164556962, |
|
"grad_norm": 0.5235452022368923, |
|
"learning_rate": 2.71329041039194e-05, |
|
"loss": 0.4384, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 2.2243037974683544, |
|
"grad_norm": 0.442924183391874, |
|
"learning_rate": 2.7079849232447357e-05, |
|
"loss": 0.3976, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 2.228354430379747, |
|
"grad_norm": 0.5099239972272517, |
|
"learning_rate": 2.7026737340842895e-05, |
|
"loss": 0.4179, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 2.2324050632911394, |
|
"grad_norm": 0.5052375545176062, |
|
"learning_rate": 2.697356885686189e-05, |
|
"loss": 0.4145, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 2.2364556962025315, |
|
"grad_norm": 0.5136145316142368, |
|
"learning_rate": 2.6920344208716014e-05, |
|
"loss": 0.433, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 2.240506329113924, |
|
"grad_norm": 0.5188721338607946, |
|
"learning_rate": 2.6867063825069252e-05, |
|
"loss": 0.3991, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 2.2445569620253165, |
|
"grad_norm": 0.4929423824079526, |
|
"learning_rate": 2.6813728135034494e-05, |
|
"loss": 0.4147, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 2.248607594936709, |
|
"grad_norm": 0.43869134529789583, |
|
"learning_rate": 2.6760337568170056e-05, |
|
"loss": 0.453, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 2.252658227848101, |
|
"grad_norm": 0.5305562669135508, |
|
"learning_rate": 2.6706892554476226e-05, |
|
"loss": 0.4201, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 2.2567088607594936, |
|
"grad_norm": 0.4291103077478918, |
|
"learning_rate": 2.6653393524391795e-05, |
|
"loss": 0.4028, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 2.260759493670886, |
|
"grad_norm": 0.5496072446140529, |
|
"learning_rate": 2.6599840908790592e-05, |
|
"loss": 0.4343, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 2.2648101265822786, |
|
"grad_norm": 0.5919851622624096, |
|
"learning_rate": 2.6546235138978028e-05, |
|
"loss": 0.4441, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 2.2688607594936707, |
|
"grad_norm": 0.46002331496180143, |
|
"learning_rate": 2.6492576646687597e-05, |
|
"loss": 0.3791, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 2.2729113924050632, |
|
"grad_norm": 0.515820405101862, |
|
"learning_rate": 2.6438865864077425e-05, |
|
"loss": 0.4657, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 2.2769620253164558, |
|
"grad_norm": 0.46693235972055935, |
|
"learning_rate": 2.6385103223726766e-05, |
|
"loss": 0.4117, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 2.2810126582278483, |
|
"grad_norm": 0.4481069296209866, |
|
"learning_rate": 2.6331289158632537e-05, |
|
"loss": 0.4014, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 2.2850632911392403, |
|
"grad_norm": 0.5103204234702331, |
|
"learning_rate": 2.6277424102205817e-05, |
|
"loss": 0.4247, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 2.289113924050633, |
|
"grad_norm": 0.5191398562160927, |
|
"learning_rate": 2.6223508488268374e-05, |
|
"loss": 0.4528, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 2.2931645569620254, |
|
"grad_norm": 0.5554501843420067, |
|
"learning_rate": 2.6169542751049148e-05, |
|
"loss": 0.4448, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 2.297215189873418, |
|
"grad_norm": 0.4794067972691308, |
|
"learning_rate": 2.6115527325180754e-05, |
|
"loss": 0.4175, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 2.30126582278481, |
|
"grad_norm": 0.5284770682154801, |
|
"learning_rate": 2.606146264569603e-05, |
|
"loss": 0.4028, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 2.3053164556962025, |
|
"grad_norm": 0.4576955902960689, |
|
"learning_rate": 2.6007349148024447e-05, |
|
"loss": 0.423, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 2.309367088607595, |
|
"grad_norm": 0.6256445694633666, |
|
"learning_rate": 2.5953187267988694e-05, |
|
"loss": 0.4063, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 2.3134177215189875, |
|
"grad_norm": 0.4527702438182646, |
|
"learning_rate": 2.5898977441801097e-05, |
|
"loss": 0.4564, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 2.3174683544303796, |
|
"grad_norm": 0.5754409197138856, |
|
"learning_rate": 2.584472010606015e-05, |
|
"loss": 0.417, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 2.321518987341772, |
|
"grad_norm": 0.49702441585757606, |
|
"learning_rate": 2.5790415697746976e-05, |
|
"loss": 0.4266, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 2.3255696202531646, |
|
"grad_norm": 0.5276407858371107, |
|
"learning_rate": 2.5736064654221808e-05, |
|
"loss": 0.4179, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 2.329620253164557, |
|
"grad_norm": 0.5713634802683929, |
|
"learning_rate": 2.568166741322048e-05, |
|
"loss": 0.4089, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 2.333670886075949, |
|
"grad_norm": 0.4879887662635231, |
|
"learning_rate": 2.56272244128509e-05, |
|
"loss": 0.4528, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 2.3377215189873417, |
|
"grad_norm": 0.45883825825353874, |
|
"learning_rate": 2.55727360915895e-05, |
|
"loss": 0.4525, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 2.3417721518987342, |
|
"grad_norm": 0.5679453294461484, |
|
"learning_rate": 2.5518202888277734e-05, |
|
"loss": 0.4064, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 2.3458227848101267, |
|
"grad_norm": 0.5050617920831095, |
|
"learning_rate": 2.5463625242118523e-05, |
|
"loss": 0.4176, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 2.349873417721519, |
|
"grad_norm": 0.510755999919126, |
|
"learning_rate": 2.5409003592672723e-05, |
|
"loss": 0.4203, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 2.3539240506329113, |
|
"grad_norm": 0.48253723890290046, |
|
"learning_rate": 2.535433837985559e-05, |
|
"loss": 0.4252, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 2.357974683544304, |
|
"grad_norm": 0.5314685702242034, |
|
"learning_rate": 2.529963004393324e-05, |
|
"loss": 0.4394, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 2.3620253164556964, |
|
"grad_norm": 0.40695718955638666, |
|
"learning_rate": 2.524487902551908e-05, |
|
"loss": 0.3726, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 2.3660759493670884, |
|
"grad_norm": 0.463752190236461, |
|
"learning_rate": 2.519008576557029e-05, |
|
"loss": 0.422, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 2.370126582278481, |
|
"grad_norm": 0.45216031924339467, |
|
"learning_rate": 2.5135250705384254e-05, |
|
"loss": 0.3975, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 2.3741772151898735, |
|
"grad_norm": 0.5167662798309056, |
|
"learning_rate": 2.5080374286595007e-05, |
|
"loss": 0.475, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 2.378227848101266, |
|
"grad_norm": 0.5146550704974726, |
|
"learning_rate": 2.5025456951169677e-05, |
|
"loss": 0.4438, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 2.382278481012658, |
|
"grad_norm": 0.5053675117234435, |
|
"learning_rate": 2.4970499141404942e-05, |
|
"loss": 0.4122, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 2.3863291139240506, |
|
"grad_norm": 0.5175711074939241, |
|
"learning_rate": 2.491550129992345e-05, |
|
"loss": 0.4047, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 2.390379746835443, |
|
"grad_norm": 0.547897820146105, |
|
"learning_rate": 2.486046386967024e-05, |
|
"loss": 0.4383, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 2.3944303797468356, |
|
"grad_norm": 0.5484487791080884, |
|
"learning_rate": 2.4805387293909214e-05, |
|
"loss": 0.426, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 2.3984810126582277, |
|
"grad_norm": 0.44364461317309306, |
|
"learning_rate": 2.4750272016219552e-05, |
|
"loss": 0.4327, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 2.40253164556962, |
|
"grad_norm": 0.5135478723402688, |
|
"learning_rate": 2.4695118480492114e-05, |
|
"loss": 0.4815, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 2.4065822784810127, |
|
"grad_norm": 0.38481456487375365, |
|
"learning_rate": 2.4639927130925898e-05, |
|
"loss": 0.3865, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 2.410632911392405, |
|
"grad_norm": 0.4777893427885791, |
|
"learning_rate": 2.458469841202444e-05, |
|
"loss": 0.415, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 2.4146835443037973, |
|
"grad_norm": 0.5314096595686632, |
|
"learning_rate": 2.452943276859226e-05, |
|
"loss": 0.4543, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 2.41873417721519, |
|
"grad_norm": 0.41476186903595336, |
|
"learning_rate": 2.447413064573125e-05, |
|
"loss": 0.4089, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 2.4227848101265823, |
|
"grad_norm": 0.563867337576708, |
|
"learning_rate": 2.4418792488837095e-05, |
|
"loss": 0.4291, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 2.426835443037975, |
|
"grad_norm": 0.46611160929941864, |
|
"learning_rate": 2.4363418743595713e-05, |
|
"loss": 0.4224, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 2.430886075949367, |
|
"grad_norm": 0.5245990958557222, |
|
"learning_rate": 2.430800985597963e-05, |
|
"loss": 0.4184, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 2.4349367088607594, |
|
"grad_norm": 0.5218662026813128, |
|
"learning_rate": 2.4252566272244415e-05, |
|
"loss": 0.4533, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 2.438987341772152, |
|
"grad_norm": 0.43294194093606214, |
|
"learning_rate": 2.4197088438925063e-05, |
|
"loss": 0.4109, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 2.4430379746835444, |
|
"grad_norm": 0.5254308606399737, |
|
"learning_rate": 2.4141576802832417e-05, |
|
"loss": 0.4385, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 2.4470886075949365, |
|
"grad_norm": 0.538540636307333, |
|
"learning_rate": 2.408603181104957e-05, |
|
"loss": 0.4416, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 2.451139240506329, |
|
"grad_norm": 0.6294670588310088, |
|
"learning_rate": 2.4030453910928245e-05, |
|
"loss": 0.4083, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 2.4551898734177215, |
|
"grad_norm": 0.5503499155494198, |
|
"learning_rate": 2.397484355008521e-05, |
|
"loss": 0.4399, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 2.459240506329114, |
|
"grad_norm": 0.5976174071257218, |
|
"learning_rate": 2.3919201176398662e-05, |
|
"loss": 0.3977, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 2.463291139240506, |
|
"grad_norm": 0.5474029485588765, |
|
"learning_rate": 2.3863527238004633e-05, |
|
"loss": 0.4586, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 2.4673417721518986, |
|
"grad_norm": 0.505896637112045, |
|
"learning_rate": 2.380782218329337e-05, |
|
"loss": 0.4123, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 2.471392405063291, |
|
"grad_norm": 0.5299157803399326, |
|
"learning_rate": 2.3752086460905725e-05, |
|
"loss": 0.4132, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 2.4754430379746837, |
|
"grad_norm": 0.5178584460148602, |
|
"learning_rate": 2.3696320519729544e-05, |
|
"loss": 0.4547, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 2.479493670886076, |
|
"grad_norm": 0.45346216954914387, |
|
"learning_rate": 2.3640524808896045e-05, |
|
"loss": 0.4125, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 2.4835443037974683, |
|
"grad_norm": 0.50329324502656, |
|
"learning_rate": 2.3584699777776222e-05, |
|
"loss": 0.4664, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 2.487594936708861, |
|
"grad_norm": 0.478586565685544, |
|
"learning_rate": 2.3528845875977195e-05, |
|
"loss": 0.4052, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 2.4916455696202533, |
|
"grad_norm": 0.5387775050728358, |
|
"learning_rate": 2.3472963553338614e-05, |
|
"loss": 0.4401, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 2.4956962025316454, |
|
"grad_norm": 0.5005048245577457, |
|
"learning_rate": 2.341705325992901e-05, |
|
"loss": 0.4172, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 2.499746835443038, |
|
"grad_norm": 0.5229740264246426, |
|
"learning_rate": 2.336111544604222e-05, |
|
"loss": 0.4227, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 2.5037974683544304, |
|
"grad_norm": 0.6302167772320204, |
|
"learning_rate": 2.33051505621937e-05, |
|
"loss": 0.4447, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 2.507848101265823, |
|
"grad_norm": 0.5436111576599202, |
|
"learning_rate": 2.324915905911693e-05, |
|
"loss": 0.4296, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 2.5118987341772154, |
|
"grad_norm": 0.5476475608217171, |
|
"learning_rate": 2.319314138775977e-05, |
|
"loss": 0.4399, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 2.5159493670886075, |
|
"grad_norm": 0.5397928279106226, |
|
"learning_rate": 2.3137097999280856e-05, |
|
"loss": 0.4329, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 0.5160156913624833, |
|
"learning_rate": 2.308102934504593e-05, |
|
"loss": 0.4102, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 2.5240506329113925, |
|
"grad_norm": 0.5224245667818569, |
|
"learning_rate": 2.3024935876624222e-05, |
|
"loss": 0.4424, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 2.5281012658227846, |
|
"grad_norm": 0.5915114445268438, |
|
"learning_rate": 2.2968818045784813e-05, |
|
"loss": 0.4546, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 2.532151898734177, |
|
"grad_norm": 0.4915564057627853, |
|
"learning_rate": 2.2912676304493006e-05, |
|
"loss": 0.4013, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 2.5362025316455696, |
|
"grad_norm": 0.49364306545983133, |
|
"learning_rate": 2.2856511104906668e-05, |
|
"loss": 0.4164, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 2.540253164556962, |
|
"grad_norm": 0.6894660383449082, |
|
"learning_rate": 2.2800322899372586e-05, |
|
"loss": 0.4588, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 2.5443037974683547, |
|
"grad_norm": 0.43836373241938154, |
|
"learning_rate": 2.2744112140422844e-05, |
|
"loss": 0.4143, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 2.5483544303797467, |
|
"grad_norm": 0.6938059429711972, |
|
"learning_rate": 2.2687879280771177e-05, |
|
"loss": 0.4428, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 2.5524050632911393, |
|
"grad_norm": 0.4645166808771536, |
|
"learning_rate": 2.26316247733093e-05, |
|
"loss": 0.427, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 2.5564556962025318, |
|
"grad_norm": 0.4948625857349155, |
|
"learning_rate": 2.257534907110328e-05, |
|
"loss": 0.3948, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 2.560506329113924, |
|
"grad_norm": 0.5284966188277499, |
|
"learning_rate": 2.2519052627389882e-05, |
|
"loss": 0.4463, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 2.5645569620253164, |
|
"grad_norm": 0.4374004123293572, |
|
"learning_rate": 2.246273589557294e-05, |
|
"loss": 0.4215, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 2.568607594936709, |
|
"grad_norm": 0.5418965532293888, |
|
"learning_rate": 2.240639932921966e-05, |
|
"loss": 0.426, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 2.5726582278481014, |
|
"grad_norm": 0.5048948405659673, |
|
"learning_rate": 2.2350043382056995e-05, |
|
"loss": 0.4287, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 2.576708860759494, |
|
"grad_norm": 0.5561873751256334, |
|
"learning_rate": 2.2293668507968015e-05, |
|
"loss": 0.4308, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 2.580759493670886, |
|
"grad_norm": 0.5534074959689884, |
|
"learning_rate": 2.2237275160988186e-05, |
|
"loss": 0.4361, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 2.5848101265822785, |
|
"grad_norm": 0.46835569965110785, |
|
"learning_rate": 2.2180863795301787e-05, |
|
"loss": 0.4018, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 2.588860759493671, |
|
"grad_norm": 0.5929654329132523, |
|
"learning_rate": 2.212443486523819e-05, |
|
"loss": 0.4592, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 2.592911392405063, |
|
"grad_norm": 0.4811937995143609, |
|
"learning_rate": 2.2067988825268243e-05, |
|
"loss": 0.4381, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 2.5969620253164556, |
|
"grad_norm": 0.49060233113020635, |
|
"learning_rate": 2.2011526130000596e-05, |
|
"loss": 0.431, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 2.601012658227848, |
|
"grad_norm": 0.45777530609931627, |
|
"learning_rate": 2.1955047234178038e-05, |
|
"loss": 0.4005, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 2.6050632911392406, |
|
"grad_norm": 0.4695746149222098, |
|
"learning_rate": 2.1898552592673825e-05, |
|
"loss": 0.4425, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 2.609113924050633, |
|
"grad_norm": 0.4115787397126848, |
|
"learning_rate": 2.184204266048803e-05, |
|
"loss": 0.4201, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 2.613164556962025, |
|
"grad_norm": 0.5078763118274502, |
|
"learning_rate": 2.1785517892743887e-05, |
|
"loss": 0.4069, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 2.6172151898734177, |
|
"grad_norm": 0.4273954417678827, |
|
"learning_rate": 2.17289787446841e-05, |
|
"loss": 0.4195, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 2.6212658227848102, |
|
"grad_norm": 0.44591613714345696, |
|
"learning_rate": 2.1672425671667198e-05, |
|
"loss": 0.422, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 2.6253164556962023, |
|
"grad_norm": 0.4362523859939479, |
|
"learning_rate": 2.161585912916385e-05, |
|
"loss": 0.4694, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 2.629367088607595, |
|
"grad_norm": 0.5523928625232588, |
|
"learning_rate": 2.1559279572753214e-05, |
|
"loss": 0.4125, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 2.6334177215189873, |
|
"grad_norm": 0.4262504859483176, |
|
"learning_rate": 2.1502687458119268e-05, |
|
"loss": 0.4147, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 2.63746835443038, |
|
"grad_norm": 0.5246692545073564, |
|
"learning_rate": 2.1446083241047116e-05, |
|
"loss": 0.4216, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 2.6415189873417724, |
|
"grad_norm": 0.4187169384117014, |
|
"learning_rate": 2.1389467377419333e-05, |
|
"loss": 0.4254, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 2.6455696202531644, |
|
"grad_norm": 0.5520031985944907, |
|
"learning_rate": 2.133284032321232e-05, |
|
"loss": 0.413, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 2.649620253164557, |
|
"grad_norm": 0.4364832571112007, |
|
"learning_rate": 2.1276202534492566e-05, |
|
"loss": 0.4911, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 2.6536708860759495, |
|
"grad_norm": 0.5837304367617978, |
|
"learning_rate": 2.121955446741306e-05, |
|
"loss": 0.4335, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 2.6577215189873415, |
|
"grad_norm": 0.42182818870440225, |
|
"learning_rate": 2.1162896578209517e-05, |
|
"loss": 0.3969, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 2.661772151898734, |
|
"grad_norm": 0.5223140262289049, |
|
"learning_rate": 2.1106229323196813e-05, |
|
"loss": 0.3929, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 2.6658227848101266, |
|
"grad_norm": 0.4263713527540944, |
|
"learning_rate": 2.1049553158765214e-05, |
|
"loss": 0.418, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 2.669873417721519, |
|
"grad_norm": 0.50615674764645, |
|
"learning_rate": 2.0992868541376764e-05, |
|
"loss": 0.4461, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 2.6739240506329116, |
|
"grad_norm": 0.46493055477756295, |
|
"learning_rate": 2.093617592756158e-05, |
|
"loss": 0.3913, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 2.6779746835443037, |
|
"grad_norm": 0.6454122836252628, |
|
"learning_rate": 2.0879475773914167e-05, |
|
"loss": 0.4771, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 2.682025316455696, |
|
"grad_norm": 0.44038115804161615, |
|
"learning_rate": 2.082276853708978e-05, |
|
"loss": 0.4447, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 2.6860759493670887, |
|
"grad_norm": 0.49278326324220073, |
|
"learning_rate": 2.076605467380071e-05, |
|
"loss": 0.3963, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 2.690126582278481, |
|
"grad_norm": 0.48054995367015046, |
|
"learning_rate": 2.0709334640812613e-05, |
|
"loss": 0.4461, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 2.6941772151898733, |
|
"grad_norm": 0.46431890372480084, |
|
"learning_rate": 2.0652608894940824e-05, |
|
"loss": 0.4214, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 2.698227848101266, |
|
"grad_norm": 0.4616051784241901, |
|
"learning_rate": 2.0595877893046722e-05, |
|
"loss": 0.4365, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 2.7022784810126583, |
|
"grad_norm": 0.45363804098972893, |
|
"learning_rate": 2.0539142092033985e-05, |
|
"loss": 0.4318, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 2.706329113924051, |
|
"grad_norm": 0.43506862238000593, |
|
"learning_rate": 2.048240194884496e-05, |
|
"loss": 0.3989, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 2.710379746835443, |
|
"grad_norm": 0.44200371636145447, |
|
"learning_rate": 2.042565792045695e-05, |
|
"loss": 0.4181, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 2.7144303797468354, |
|
"grad_norm": 0.4109495179049366, |
|
"learning_rate": 2.036891046387857e-05, |
|
"loss": 0.415, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 2.718481012658228, |
|
"grad_norm": 0.40890495081279765, |
|
"learning_rate": 2.0312160036146036e-05, |
|
"loss": 0.4393, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 2.72253164556962, |
|
"grad_norm": 0.4350327409499441, |
|
"learning_rate": 2.025540709431948e-05, |
|
"loss": 0.4195, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 2.7265822784810125, |
|
"grad_norm": 0.4205484729539491, |
|
"learning_rate": 2.0198652095479298e-05, |
|
"loss": 0.4136, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 2.730632911392405, |
|
"grad_norm": 0.45423270266954274, |
|
"learning_rate": 2.014189549672245e-05, |
|
"loss": 0.4305, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 2.7346835443037976, |
|
"grad_norm": 0.5089851027866287, |
|
"learning_rate": 2.0085137755158776e-05, |
|
"loss": 0.442, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 2.73873417721519, |
|
"grad_norm": 0.43368480244491125, |
|
"learning_rate": 2.0028379327907327e-05, |
|
"loss": 0.4111, |
|
"step": 676 |
|
},
{ "epoch": 2.742784810126582, "grad_norm": 0.48041327277393836, "learning_rate": 1.9971620672092676e-05, "loss": 0.4357, "step": 677 },
{ "epoch": 2.7468354430379747, "grad_norm": 0.4590330285510384, "learning_rate": 1.991486224484123e-05, "loss": 0.4044, "step": 678 },
{ "epoch": 2.750886075949367, "grad_norm": 0.5170465456530927, "learning_rate": 1.985810450327756e-05, "loss": 0.4382, "step": 679 },
{ "epoch": 2.7549367088607593, "grad_norm": 0.430985054945564, "learning_rate": 1.9801347904520706e-05, "loss": 0.4207, "step": 680 },
{ "epoch": 2.7589873417721518, "grad_norm": 0.44900709883801937, "learning_rate": 1.974459290568053e-05, "loss": 0.4257, "step": 681 },
{ "epoch": 2.7630379746835443, "grad_norm": 0.4222542201533496, "learning_rate": 1.968783996385397e-05, "loss": 0.4302, "step": 682 },
{ "epoch": 2.767088607594937, "grad_norm": 0.4686751275774582, "learning_rate": 1.963108953612143e-05, "loss": 0.4411, "step": 683 },
{ "epoch": 2.7711392405063293, "grad_norm": 0.41717342228974236, "learning_rate": 1.9574342079543056e-05, "loss": 0.4363, "step": 684 },
{ "epoch": 2.7751898734177214, "grad_norm": 0.44236149557329285, "learning_rate": 1.9517598051155046e-05, "loss": 0.4263, "step": 685 },
{ "epoch": 2.779240506329114, "grad_norm": 0.44673292216211224, "learning_rate": 1.9460857907966025e-05, "loss": 0.4333, "step": 686 },
{ "epoch": 2.7832911392405064, "grad_norm": 0.5201178283697823, "learning_rate": 1.9404122106953285e-05, "loss": 0.4697, "step": 687 },
{ "epoch": 2.7873417721518985, "grad_norm": 0.41882997985859777, "learning_rate": 1.9347391105059176e-05, "loss": 0.3508, "step": 688 },
{ "epoch": 2.791392405063291, "grad_norm": 0.3943793784690471, "learning_rate": 1.92906653591874e-05, "loss": 0.4108, "step": 689 },
{ "epoch": 2.7954430379746835, "grad_norm": 0.5110511694167242, "learning_rate": 1.9233945326199295e-05, "loss": 0.4396, "step": 690 },
{ "epoch": 2.799493670886076, "grad_norm": 0.46838798967898804, "learning_rate": 1.917723146291022e-05, "loss": 0.429, "step": 691 },
{ "epoch": 2.8035443037974686, "grad_norm": 0.45694500079209743, "learning_rate": 1.912052422608584e-05, "loss": 0.3628, "step": 692 },
{ "epoch": 2.8075949367088606, "grad_norm": 0.5353832657560298, "learning_rate": 1.9063824072438428e-05, "loss": 0.5053, "step": 693 },
{ "epoch": 2.811645569620253, "grad_norm": 0.3818593107768754, "learning_rate": 1.9007131458623246e-05, "loss": 0.3723, "step": 694 },
{ "epoch": 2.8156962025316457, "grad_norm": 0.516333215692529, "learning_rate": 1.895044684123479e-05, "loss": 0.4507, "step": 695 },
{ "epoch": 2.8197468354430377, "grad_norm": 0.4566475555077562, "learning_rate": 1.8893770676803194e-05, "loss": 0.447, "step": 696 },
{ "epoch": 2.8237974683544302, "grad_norm": 0.4220317964350597, "learning_rate": 1.8837103421790486e-05, "loss": 0.3643, "step": 697 },
{ "epoch": 2.8278481012658228, "grad_norm": 0.3991236463589033, "learning_rate": 1.8780445532586952e-05, "loss": 0.4225, "step": 698 },
{ "epoch": 2.8318987341772153, "grad_norm": 0.48203555049805974, "learning_rate": 1.872379746550743e-05, "loss": 0.4452, "step": 699 },
{ "epoch": 2.835949367088608, "grad_norm": 0.3911662757767854, "learning_rate": 1.866715967678769e-05, "loss": 0.4129, "step": 700 },
{ "epoch": 2.84, "grad_norm": 0.4599643238616371, "learning_rate": 1.861053262258067e-05, "loss": 0.4303, "step": 701 },
{ "epoch": 2.8440506329113924, "grad_norm": 0.4768862889654696, "learning_rate": 1.8553916758952897e-05, "loss": 0.4509, "step": 702 },
{ "epoch": 2.848101265822785, "grad_norm": 0.4191040578074366, "learning_rate": 1.8497312541880735e-05, "loss": 0.4104, "step": 703 },
{ "epoch": 2.852151898734177, "grad_norm": 0.43007727269554547, "learning_rate": 1.8440720427246786e-05, "loss": 0.4249, "step": 704 },
{ "epoch": 2.8562025316455695, "grad_norm": 0.3936398909142753, "learning_rate": 1.8384140870836157e-05, "loss": 0.3869, "step": 705 },
{ "epoch": 2.860253164556962, "grad_norm": 0.46440397631152613, "learning_rate": 1.8327574328332806e-05, "loss": 0.4482, "step": 706 },
{ "epoch": 2.8643037974683545, "grad_norm": 0.46841118573607593, "learning_rate": 1.8271021255315906e-05, "loss": 0.4251, "step": 707 },
{ "epoch": 2.868354430379747, "grad_norm": 0.4786769601565617, "learning_rate": 1.8214482107256117e-05, "loss": 0.4287, "step": 708 },
{ "epoch": 2.872405063291139, "grad_norm": 0.5083024534331989, "learning_rate": 1.8157957339511968e-05, "loss": 0.4321, "step": 709 },
{ "epoch": 2.8764556962025316, "grad_norm": 0.39844698142372803, "learning_rate": 1.8101447407326182e-05, "loss": 0.3913, "step": 710 },
{ "epoch": 2.880506329113924, "grad_norm": 0.5110178760120426, "learning_rate": 1.8044952765821966e-05, "loss": 0.4722, "step": 711 },
{ "epoch": 2.884556962025316, "grad_norm": 0.3740285037848518, "learning_rate": 1.7988473869999407e-05, "loss": 0.4103, "step": 712 },
{ "epoch": 2.8886075949367087, "grad_norm": 0.3939554318050098, "learning_rate": 1.7932011174731764e-05, "loss": 0.4194, "step": 713 },
{ "epoch": 2.8926582278481012, "grad_norm": 0.45997200521586756, "learning_rate": 1.7875565134761817e-05, "loss": 0.4364, "step": 714 },
{ "epoch": 2.8967088607594937, "grad_norm": 0.4374193953402831, "learning_rate": 1.7819136204698226e-05, "loss": 0.4346, "step": 715 },
{ "epoch": 2.9007594936708863, "grad_norm": 0.4289640134921434, "learning_rate": 1.776272483901182e-05, "loss": 0.4055, "step": 716 },
{ "epoch": 2.9048101265822783, "grad_norm": 0.4332326575462663, "learning_rate": 1.7706331492031995e-05, "loss": 0.4455, "step": 717 },
{ "epoch": 2.908860759493671, "grad_norm": 0.44525928426212763, "learning_rate": 1.764995661794301e-05, "loss": 0.4345, "step": 718 },
{ "epoch": 2.9129113924050634, "grad_norm": 0.4403732438043824, "learning_rate": 1.759360067078035e-05, "loss": 0.4412, "step": 719 },
{ "epoch": 2.9169620253164554, "grad_norm": 0.40606386814783024, "learning_rate": 1.7537264104427064e-05, "loss": 0.4123, "step": 720 },
{ "epoch": 2.921012658227848, "grad_norm": 0.4778179138949842, "learning_rate": 1.748094737261012e-05, "loss": 0.4284, "step": 721 },
{ "epoch": 2.9250632911392405, "grad_norm": 0.44245927491376297, "learning_rate": 1.7424650928896726e-05, "loss": 0.4465, "step": 722 },
{ "epoch": 2.929113924050633, "grad_norm": 0.4001547520208834, "learning_rate": 1.7368375226690712e-05, "loss": 0.3922, "step": 723 },
{ "epoch": 2.9331645569620255, "grad_norm": 0.43694988727088135, "learning_rate": 1.731212071922883e-05, "loss": 0.43, "step": 724 },
{ "epoch": 2.9372151898734176, "grad_norm": 0.45338070460280505, "learning_rate": 1.7255887859577156e-05, "loss": 0.4413, "step": 725 },
{ "epoch": 2.94126582278481, "grad_norm": 0.38553607813193075, "learning_rate": 1.7199677100627427e-05, "loss": 0.4221, "step": 726 },
{ "epoch": 2.9453164556962026, "grad_norm": 0.4318648928711469, "learning_rate": 1.7143488895093343e-05, "loss": 0.4043, "step": 727 },
{ "epoch": 2.9493670886075947, "grad_norm": 0.45386906381116193, "learning_rate": 1.7087323695506994e-05, "loss": 0.4452, "step": 728 },
{ "epoch": 2.953417721518987, "grad_norm": 0.4239722502822553, "learning_rate": 1.7031181954215194e-05, "loss": 0.4506, "step": 729 },
{ "epoch": 2.9574683544303797, "grad_norm": 0.36884728762611646, "learning_rate": 1.6975064123375788e-05, "loss": 0.415, "step": 730 },
{ "epoch": 2.961518987341772, "grad_norm": 0.4516428249567874, "learning_rate": 1.6918970654954084e-05, "loss": 0.4218, "step": 731 },
{ "epoch": 2.9655696202531647, "grad_norm": 0.4362085473574307, "learning_rate": 1.686290200071915e-05, "loss": 0.406, "step": 732 },
{ "epoch": 2.969620253164557, "grad_norm": 0.41409491220705485, "learning_rate": 1.6806858612240234e-05, "loss": 0.4281, "step": 733 },
{ "epoch": 2.9736708860759493, "grad_norm": 0.3920203791761346, "learning_rate": 1.6750840940883078e-05, "loss": 0.3778, "step": 734 },
{ "epoch": 2.977721518987342, "grad_norm": 0.47155656621277625, "learning_rate": 1.6694849437806305e-05, "loss": 0.4488, "step": 735 },
{ "epoch": 2.981772151898734, "grad_norm": 0.4296912676968355, "learning_rate": 1.663888455395778e-05, "loss": 0.4273, "step": 736 },
{ "epoch": 2.9858227848101264, "grad_norm": 0.4480172463797977, "learning_rate": 1.6582946740070995e-05, "loss": 0.4276, "step": 737 },
{ "epoch": 2.989873417721519, "grad_norm": 0.4337399689862748, "learning_rate": 1.6527036446661396e-05, "loss": 0.4166, "step": 738 },
{ "epoch": 2.9939240506329114, "grad_norm": 0.4492701388484977, "learning_rate": 1.6471154124022818e-05, "loss": 0.4232, "step": 739 },
{ "epoch": 2.997974683544304, "grad_norm": 0.5279432349461561, "learning_rate": 1.6415300222223788e-05, "loss": 0.4507, "step": 740 },
{ "epoch": 3.002278481012658, "grad_norm": 0.5435228850813848, "learning_rate": 1.6359475191103958e-05, "loss": 0.4137, "step": 741 },
{ "epoch": 3.0063291139240507, "grad_norm": 0.604414606583873, "learning_rate": 1.6303679480270466e-05, "loss": 0.3232, "step": 742 },
{ "epoch": 3.010379746835443, "grad_norm": 0.48163801111220456, "learning_rate": 1.624791353909428e-05, "loss": 0.3124, "step": 743 },
{ "epoch": 3.0144303797468353, "grad_norm": 0.9851153042026485, "learning_rate": 1.619217781670663e-05, "loss": 0.3626, "step": 744 },
{ "epoch": 3.0184810126582278, "grad_norm": 0.5982138701774492, "learning_rate": 1.6136472761995373e-05, "loss": 0.2963, "step": 745 },
{ "epoch": 3.0225316455696203, "grad_norm": 0.5821064449073874, "learning_rate": 1.608079882360134e-05, "loss": 0.2883, "step": 746 },
{ "epoch": 3.026582278481013, "grad_norm": 0.614472316780645, "learning_rate": 1.60251564499148e-05, "loss": 0.3449, "step": 747 },
{ "epoch": 3.030632911392405, "grad_norm": 0.5747630732909984, "learning_rate": 1.596954608907176e-05, "loss": 0.3046, "step": 748 },
{ "epoch": 3.0346835443037974, "grad_norm": 0.5544277401745573, "learning_rate": 1.591396818895043e-05, "loss": 0.3288, "step": 749 },
{ "epoch": 3.03873417721519, "grad_norm": 0.5258749435121917, "learning_rate": 1.585842319716759e-05, "loss": 0.2985, "step": 750 },
{ "epoch": 3.0427848101265824, "grad_norm": 0.5496764025334598, "learning_rate": 1.5802911561074944e-05, "loss": 0.3313, "step": 751 },
{ "epoch": 3.0468354430379745, "grad_norm": 0.5701222179147744, "learning_rate": 1.5747433727755595e-05, "loss": 0.3414, "step": 752 },
{ "epoch": 3.050886075949367, "grad_norm": 0.5735600634097834, "learning_rate": 1.5691990144020376e-05, "loss": 0.3385, "step": 753 },
{ "epoch": 3.0549367088607595, "grad_norm": 0.4739633390701903, "learning_rate": 1.5636581256404297e-05, "loss": 0.3173, "step": 754 },
{ "epoch": 3.058987341772152, "grad_norm": 0.5191012310730974, "learning_rate": 1.558120751116291e-05, "loss": 0.2903, "step": 755 },
{ "epoch": 3.063037974683544, "grad_norm": 0.4477710795677636, "learning_rate": 1.552586935426876e-05, "loss": 0.3032, "step": 756 },
{ "epoch": 3.0670886075949366, "grad_norm": 0.4656777586635711, "learning_rate": 1.547056723140774e-05, "loss": 0.302, "step": 757 },
{ "epoch": 3.071139240506329, "grad_norm": 0.4429864297109438, "learning_rate": 1.5415301587975565e-05, "loss": 0.2898, "step": 758 },
{ "epoch": 3.0751898734177217, "grad_norm": 0.42844195129136176, "learning_rate": 1.536007286907411e-05, "loss": 0.3098, "step": 759 },
{ "epoch": 3.0792405063291137, "grad_norm": 0.4635168330171718, "learning_rate": 1.5304881519507896e-05, "loss": 0.3473, "step": 760 },
{ "epoch": 3.0832911392405062, "grad_norm": 0.4322689450631132, "learning_rate": 1.5249727983780453e-05, "loss": 0.3136, "step": 761 },
{ "epoch": 3.0873417721518988, "grad_norm": 0.41787830266911113, "learning_rate": 1.5194612706090786e-05, "loss": 0.2859, "step": 762 },
{ "epoch": 3.0913924050632913, "grad_norm": 0.422515499496248, "learning_rate": 1.5139536130329771e-05, "loss": 0.3295, "step": 763 },
{ "epoch": 3.0954430379746833, "grad_norm": 0.5221909278069066, "learning_rate": 1.508449870007656e-05, "loss": 0.3313, "step": 764 },
{ "epoch": 3.099493670886076, "grad_norm": 0.41427544720144466, "learning_rate": 1.5029500858595056e-05, "loss": 0.3099, "step": 765 },
{ "epoch": 3.1035443037974684, "grad_norm": 0.4743981221066582, "learning_rate": 1.4974543048830328e-05, "loss": 0.2962, "step": 766 },
{ "epoch": 3.107594936708861, "grad_norm": 0.5494119848697887, "learning_rate": 1.4919625713405e-05, "loss": 0.3562, "step": 767 },
{ "epoch": 3.111645569620253, "grad_norm": 0.4153540928529232, "learning_rate": 1.4864749294615756e-05, "loss": 0.3085, "step": 768 },
{ "epoch": 3.1156962025316455, "grad_norm": 0.559706929841413, "learning_rate": 1.4809914234429716e-05, "loss": 0.3242, "step": 769 },
{ "epoch": 3.119746835443038, "grad_norm": 0.36634190684195883, "learning_rate": 1.4755120974480923e-05, "loss": 0.2768, "step": 770 },
{ "epoch": 3.1237974683544305, "grad_norm": 0.4879025480186631, "learning_rate": 1.4700369956066771e-05, "loss": 0.3187, "step": 771 },
{ "epoch": 3.1278481012658226, "grad_norm": 0.41376143800475335, "learning_rate": 1.4645661620144413e-05, "loss": 0.3039, "step": 772 },
{ "epoch": 3.131898734177215, "grad_norm": 0.35342875939225266, "learning_rate": 1.4590996407327284e-05, "loss": 0.2762, "step": 773 },
{ "epoch": 3.1359493670886076, "grad_norm": 0.4438032153245839, "learning_rate": 1.4536374757881487e-05, "loss": 0.3485, "step": 774 },
{ "epoch": 3.14, "grad_norm": 0.35241390747136775, "learning_rate": 1.4481797111722271e-05, "loss": 0.3091, "step": 775 },
{ "epoch": 3.1440506329113926, "grad_norm": 0.4065875013137187, "learning_rate": 1.4427263908410507e-05, "loss": 0.3075, "step": 776 },
{ "epoch": 3.1481012658227847, "grad_norm": 0.4110147622112397, "learning_rate": 1.4372775587149108e-05, "loss": 0.3245, "step": 777 },
{ "epoch": 3.1521518987341772, "grad_norm": 0.3944143900112115, "learning_rate": 1.4318332586779522e-05, "loss": 0.3105, "step": 778 },
{ "epoch": 3.1562025316455697, "grad_norm": 0.4109490086005844, "learning_rate": 1.4263935345778202e-05, "loss": 0.3374, "step": 779 },
{ "epoch": 3.160253164556962, "grad_norm": 0.39422070971417655, "learning_rate": 1.420958430225303e-05, "loss": 0.2846, "step": 780 },
{ "epoch": 3.1643037974683543, "grad_norm": 0.45685276487464277, "learning_rate": 1.415527989393985e-05, "loss": 0.3729, "step": 781 },
{ "epoch": 3.168354430379747, "grad_norm": 0.39226580253722354, "learning_rate": 1.410102255819891e-05, "loss": 0.307, "step": 782 },
{ "epoch": 3.1724050632911394, "grad_norm": 0.36862089990628044, "learning_rate": 1.404681273201131e-05, "loss": 0.2833, "step": 783 },
{ "epoch": 3.176455696202532, "grad_norm": 0.36724871314143875, "learning_rate": 1.399265085197556e-05, "loss": 0.2937, "step": 784 },
{ "epoch": 3.180506329113924, "grad_norm": 0.4028911299417396, "learning_rate": 1.393853735430398e-05, "loss": 0.3311, "step": 785 },
{ "epoch": 3.1845569620253165, "grad_norm": 0.3752694837143601, "learning_rate": 1.3884472674819246e-05, "loss": 0.275, "step": 786 },
{ "epoch": 3.188607594936709, "grad_norm": 0.45484344922502, "learning_rate": 1.3830457248950864e-05, "loss": 0.3679, "step": 787 },
{ "epoch": 3.192658227848101, "grad_norm": 0.3996383400086033, "learning_rate": 1.377649151173163e-05, "loss": 0.3016, "step": 788 },
{ "epoch": 3.1967088607594936, "grad_norm": 0.353012788320879, "learning_rate": 1.3722575897794181e-05, "loss": 0.2824, "step": 789 },
{ "epoch": 3.200759493670886, "grad_norm": 0.5240495935767944, "learning_rate": 1.3668710841367472e-05, "loss": 0.3503, "step": 790 },
{ "epoch": 3.2048101265822786, "grad_norm": 0.41257741995548286, "learning_rate": 1.361489677627324e-05, "loss": 0.314, "step": 791 },
{ "epoch": 3.208860759493671, "grad_norm": 0.45252183755387493, "learning_rate": 1.3561134135922585e-05, "loss": 0.3111, "step": 792 },
{ "epoch": 3.212911392405063, "grad_norm": 0.46874710378796675, "learning_rate": 1.350742335331241e-05, "loss": 0.2772, "step": 793 },
{ "epoch": 3.2169620253164557, "grad_norm": 0.40497819652850203, "learning_rate": 1.345376486102198e-05, "loss": 0.3201, "step": 794 },
{ "epoch": 3.221012658227848, "grad_norm": 0.47615234825371283, "learning_rate": 1.3400159091209414e-05, "loss": 0.3005, "step": 795 },
{ "epoch": 3.2250632911392403, "grad_norm": 0.4721333297752663, "learning_rate": 1.3346606475608216e-05, "loss": 0.3455, "step": 796 },
{ "epoch": 3.229113924050633, "grad_norm": 0.40553171925494313, "learning_rate": 1.3293107445523781e-05, "loss": 0.3053, "step": 797 },
{ "epoch": 3.2331645569620253, "grad_norm": 0.4306875564715964, "learning_rate": 1.3239662431829949e-05, "loss": 0.3099, "step": 798 },
{ "epoch": 3.237215189873418, "grad_norm": 0.3943308454107528, "learning_rate": 1.3186271864965509e-05, "loss": 0.3265, "step": 799 },
{ "epoch": 3.2412658227848103, "grad_norm": 0.41881553475907457, "learning_rate": 1.3132936174930756e-05, "loss": 0.2826, "step": 800 },
{ "epoch": 3.2453164556962024, "grad_norm": 0.44267610267618235, "learning_rate": 1.3079655791283995e-05, "loss": 0.3351, "step": 801 },
{ "epoch": 3.249367088607595, "grad_norm": 0.3506535452045794, "learning_rate": 1.3026431143138108e-05, "loss": 0.2785, "step": 802 },
{ "epoch": 3.2534177215189874, "grad_norm": 0.43581929890756543, "learning_rate": 1.2973262659157114e-05, "loss": 0.3382, "step": 803 },
{ "epoch": 3.2574683544303795, "grad_norm": 0.37154687117216706, "learning_rate": 1.2920150767552651e-05, "loss": 0.3116, "step": 804 },
{ "epoch": 3.261518987341772, "grad_norm": 0.3430543423212978, "learning_rate": 1.2867095896080607e-05, "loss": 0.2939, "step": 805 },
{ "epoch": 3.2655696202531646, "grad_norm": 0.3843894861392431, "learning_rate": 1.2814098472037612e-05, "loss": 0.3218, "step": 806 },
{ "epoch": 3.269620253164557, "grad_norm": 0.40872920198922646, "learning_rate": 1.276115892225764e-05, "loss": 0.343, "step": 807 },
{ "epoch": 3.2736708860759496, "grad_norm": 0.3703859728727641, "learning_rate": 1.2708277673108555e-05, "loss": 0.3153, "step": 808 },
{ "epoch": 3.2777215189873417, "grad_norm": 0.38962731108117654, "learning_rate": 1.2655455150488649e-05, "loss": 0.2894, "step": 809 },
{ "epoch": 3.281772151898734, "grad_norm": 0.38742007118472005, "learning_rate": 1.2602691779823272e-05, "loss": 0.3228, "step": 810 },
{ "epoch": 3.2858227848101267, "grad_norm": 0.42676293577047186, "learning_rate": 1.2549987986061355e-05, "loss": 0.3121, "step": 811 },
{ "epoch": 3.2898734177215188, "grad_norm": 0.4631551480685098, "learning_rate": 1.2497344193672005e-05, "loss": 0.3385, "step": 812 },
{ "epoch": 3.2939240506329113, "grad_norm": 0.3530408609142979, "learning_rate": 1.2444760826641092e-05, "loss": 0.2843, "step": 813 },
{ "epoch": 3.297974683544304, "grad_norm": 0.4306928763529447, "learning_rate": 1.2392238308467817e-05, "loss": 0.3054, "step": 814 },
{ "epoch": 3.3020253164556963, "grad_norm": 0.42125717438572935, "learning_rate": 1.2339777062161326e-05, "loss": 0.3156, "step": 815 },
{ "epoch": 3.306075949367089, "grad_norm": 0.38672961769253583, "learning_rate": 1.2287377510237293e-05, "loss": 0.3139, "step": 816 },
{ "epoch": 3.310126582278481, "grad_norm": 0.4874364841461359, "learning_rate": 1.2235040074714488e-05, "loss": 0.3387, "step": 817 },
{ "epoch": 3.3141772151898734, "grad_norm": 0.34802671044669065, "learning_rate": 1.2182765177111434e-05, "loss": 0.2925, "step": 818 },
{ "epoch": 3.318227848101266, "grad_norm": 0.4471969470085418, "learning_rate": 1.213055323844297e-05, "loss": 0.3293, "step": 819 },
{ "epoch": 3.322278481012658, "grad_norm": 0.40145283221051326, "learning_rate": 1.2078404679216864e-05, "loss": 0.3275, "step": 820 },
{ "epoch": 3.3263291139240505, "grad_norm": 0.33572566019107836, "learning_rate": 1.2026319919430458e-05, "loss": 0.3028, "step": 821 },
{ "epoch": 3.330379746835443, "grad_norm": 0.41685112647713757, "learning_rate": 1.1974299378567227e-05, "loss": 0.3321, "step": 822 },
{ "epoch": 3.3344303797468355, "grad_norm": 0.36536020070251285, "learning_rate": 1.1922343475593462e-05, "loss": 0.3133, "step": 823 },
{ "epoch": 3.338481012658228, "grad_norm": 0.362118971664496, "learning_rate": 1.187045262895488e-05, "loss": 0.309, "step": 824 },
{ "epoch": 3.34253164556962, "grad_norm": 0.43366573402606745, "learning_rate": 1.1818627256573203e-05, "loss": 0.3479, "step": 825 },
{ "epoch": 3.3465822784810126, "grad_norm": 0.36743147794730624, "learning_rate": 1.1766867775842864e-05, "loss": 0.3159, "step": 826 },
{ "epoch": 3.350632911392405, "grad_norm": 0.3981477096902416, "learning_rate": 1.1715174603627615e-05, "loss": 0.2813, "step": 827 },
{ "epoch": 3.3546835443037972, "grad_norm": 0.37006047587597124, "learning_rate": 1.1663548156257147e-05, "loss": 0.317, "step": 828 },
{ "epoch": 3.3587341772151897, "grad_norm": 0.39765301722274793, "learning_rate": 1.161198884952377e-05, "loss": 0.3247, "step": 829 },
{ "epoch": 3.3627848101265823, "grad_norm": 0.3831017490315007, "learning_rate": 1.1560497098679056e-05, "loss": 0.327, "step": 830 },
{ "epoch": 3.3668354430379748, "grad_norm": 0.39629617902177805, "learning_rate": 1.1509073318430479e-05, "loss": 0.3142, "step": 831 },
{ "epoch": 3.3708860759493673, "grad_norm": 0.35314845223528935, "learning_rate": 1.1457717922938116e-05, "loss": 0.302, "step": 832 },
{ "epoch": 3.3749367088607594, "grad_norm": 0.4043235655209997, "learning_rate": 1.1406431325811233e-05, "loss": 0.3306, "step": 833 },
{ "epoch": 3.378987341772152, "grad_norm": 0.3670605369435142, "learning_rate": 1.135521394010506e-05, "loss": 0.3127, "step": 834 },
{ "epoch": 3.3830379746835444, "grad_norm": 0.4164210139476333, "learning_rate": 1.1304066178317367e-05, "loss": 0.3522, "step": 835 },
{ "epoch": 3.3870886075949365, "grad_norm": 0.35487967151598554, "learning_rate": 1.1252988452385199e-05, "loss": 0.2827, "step": 836 },
{ "epoch": 3.391139240506329, "grad_norm": 0.37753239860578636, "learning_rate": 1.1201981173681536e-05, "loss": 0.324, "step": 837 },
{ "epoch": 3.3951898734177215, "grad_norm": 0.37260895301908614, "learning_rate": 1.1151044753011991e-05, "loss": 0.3073, "step": 838 },
{ "epoch": 3.399240506329114, "grad_norm": 0.41335809618853975, "learning_rate": 1.1100179600611491e-05, "loss": 0.3078, "step": 839 },
{ "epoch": 3.4032911392405065, "grad_norm": 0.3953321969776827, "learning_rate": 1.1049386126140985e-05, "loss": 0.3564, "step": 840 },
{ "epoch": 3.4073417721518986, "grad_norm": 0.3623353364640113, "learning_rate": 1.0998664738684128e-05, "loss": 0.2744, "step": 841 },
{ "epoch": 3.411392405063291, "grad_norm": 0.40110626009611683, "learning_rate": 1.0948015846744e-05, "loss": 0.3266, "step": 842 },
{ "epoch": 3.4154430379746836, "grad_norm": 0.36797986497696783, "learning_rate": 1.0897439858239832e-05, "loss": 0.2984, "step": 843 },
{ "epoch": 3.419493670886076, "grad_norm": 0.3897141737167115, "learning_rate": 1.0846937180503652e-05, "loss": 0.3103, "step": 844 },
{ "epoch": 3.423544303797468, "grad_norm": 0.3773502712546857, "learning_rate": 1.0796508220277117e-05, "loss": 0.3167, "step": 845 },
{ "epoch": 3.4275949367088607, "grad_norm": 0.37284197823784093, "learning_rate": 1.0746153383708107e-05, "loss": 0.3288, "step": 846 },
{ "epoch": 3.4316455696202532, "grad_norm": 0.39583666436785653, "learning_rate": 1.0695873076347579e-05, "loss": 0.2982, "step": 847 },
{ "epoch": 3.4356962025316458, "grad_norm": 0.37812322066277687, "learning_rate": 1.0645667703146205e-05, "loss": 0.2779, "step": 848 },
{ "epoch": 3.439746835443038, "grad_norm": 0.4175768986539499, "learning_rate": 1.0595537668451161e-05, "loss": 0.3384, "step": 849 },
{ "epoch": 3.4437974683544303, "grad_norm": 0.36434646102486656, "learning_rate": 1.0545483376002854e-05, "loss": 0.2964, "step": 850 },
{ "epoch": 3.447848101265823, "grad_norm": 0.37278923534295577, "learning_rate": 1.0495505228931676e-05, "loss": 0.3284, "step": 851 },
{ "epoch": 3.4518987341772154, "grad_norm": 0.3720135351582702, "learning_rate": 1.044560362975474e-05, "loss": 0.3069, "step": 852 },
{ "epoch": 3.4559493670886074, "grad_norm": 0.33226935443853306, "learning_rate": 1.0395778980372695e-05, "loss": 0.2929, "step": 853 },
{ "epoch": 3.46, "grad_norm": 0.3612729426716603, "learning_rate": 1.0346031682066381e-05, "loss": 0.3229, "step": 854 },
{ "epoch": 3.4640506329113925, "grad_norm": 0.3989475476963846, "learning_rate": 1.0296362135493724e-05, "loss": 0.3257, "step": 855 },
{ "epoch": 3.468101265822785, "grad_norm": 0.34102247885466036, "learning_rate": 1.0246770740686422e-05, "loss": 0.2895, "step": 856 },
{ "epoch": 3.472151898734177, "grad_norm": 0.38596000775074185, "learning_rate": 1.0197257897046743e-05, "loss": 0.3343, "step": 857 },
{ "epoch": 3.4762025316455696, "grad_norm": 0.3468688240776255, "learning_rate": 1.014782400334433e-05, "loss": 0.2893, "step": 858 },
{ "epoch": 3.480253164556962, "grad_norm": 0.35977254087850075, "learning_rate": 1.009846945771296e-05, "loss": 0.3097, "step": 859 },
{ "epoch": 3.4843037974683546, "grad_norm": 0.36956624968646595, "learning_rate": 1.0049194657647363e-05, "loss": 0.3041, "step": 860 },
{ "epoch": 3.4883544303797467, "grad_norm": 0.42250224017958676, "learning_rate": 1.0000000000000006e-05, "loss": 0.3396, "step": 861 },
{ "epoch": 3.492405063291139, "grad_norm": 0.32913679660059836, "learning_rate": 9.950885880977891e-06, "loss": 0.2966, "step": 862 },
{ "epoch": 3.4964556962025317, "grad_norm": 0.4083367860679121, "learning_rate": 9.901852696139382e-06, "loss": 0.3268, "step": 863 },
{ "epoch": 3.5005063291139242, "grad_norm": 0.4017224425650291, "learning_rate": 9.852900840391027e-06, "loss": 0.3133, "step": 864 },
{ "epoch": 3.5045569620253163, "grad_norm": 0.38760982866095317, "learning_rate": 9.804030707984313e-06, "loss": 0.3331, "step": 865 },
{ "epoch": 3.508607594936709, "grad_norm": 0.3413876708230215, "learning_rate": 9.755242692512599e-06, "loss": 0.2936, "step": 866 },
{ "epoch": 3.5126582278481013, "grad_norm": 0.3770148775037438, "learning_rate": 9.70653718690782e-06, "loss": 0.3401, "step": 867 },
{ "epoch": 3.5167088607594934, "grad_norm": 0.37355081538418694, "learning_rate": 9.657914583437454e-06, "loss": 0.3157, "step": 868 },
{ "epoch": 3.520759493670886, "grad_norm": 0.36022453790279296, "learning_rate": 9.609375273701246e-06, "loss": 0.3102, "step": 869 },
{ "epoch": 3.5248101265822784, "grad_norm": 0.3846289424593832, "learning_rate": 9.560919648628133e-06, "loss": 0.3397, "step": 870 },
{ "epoch": 3.528860759493671, "grad_norm": 0.3471139541017973, "learning_rate": 9.512548098473047e-06, "loss": 0.3077, "step": 871 },
{ "epoch": 3.5329113924050635, "grad_norm": 0.37936388129542836, "learning_rate": 9.464261012813825e-06, "loss": 0.3097, "step": 872 },
{ "epoch": 3.5369620253164555, "grad_norm": 0.419304396446441, "learning_rate": 9.416058780547987e-06, "loss": 0.3256, "step": 873 },
{ "epoch": 3.541012658227848, "grad_norm": 0.36961429300197923, "learning_rate": 9.367941789889714e-06, "loss": 0.3214, "step": 874 },
{ "epoch": 3.5450632911392406, "grad_norm": 0.37452798196218196, "learning_rate": 9.319910428366607e-06, "loss": 0.3062, "step": 875 },
{ "epoch": 3.5491139240506326, "grad_norm": 0.3847151434752731, "learning_rate": 9.271965082816667e-06, "loss": 0.3306, "step": 876 },
{ "epoch": 3.553164556962025, "grad_norm": 0.3722974081286379, "learning_rate": 9.224106139385111e-06, "loss": 0.3188, "step": 877 },
{ "epoch": 3.5572151898734177, "grad_norm": 0.38793903265357, "learning_rate": 9.176333983521291e-06, "loss": 0.3161, "step": 878 },
{ "epoch": 3.56126582278481, "grad_norm": 0.38692851469718026, "learning_rate": 9.12864899997558e-06, "loss": 0.3275, "step": 879 },
{ "epoch": 3.5653164556962027, "grad_norm": 0.3380821525730518, "learning_rate": 9.08105157279628e-06, "loss": 0.2895, "step": 880 },
{ "epoch": 3.5693670886075948, "grad_norm": 0.394868362243374, "learning_rate": 9.03354208532653e-06, "loss": 0.3414, "step": 881 },
{ "epoch": 3.5734177215189873, "grad_norm": 0.36875011153359694, "learning_rate": 8.986120920201205e-06, "loss": 0.311, "step": 882 },
{ "epoch": 3.57746835443038, "grad_norm": 0.3621645591121142, "learning_rate": 8.938788459343852e-06, "loss": 0.3019, "step": 883 },
{ "epoch": 3.581518987341772, "grad_norm": 0.3438886690677529, "learning_rate": 8.8915450839636e-06, "loss": 0.2957, "step": 884 },
{ "epoch": 3.5855696202531644, "grad_norm": 0.39941548934287285, "learning_rate": 8.844391174552116e-06, "loss": 0.3595, "step": 885 },
{ "epoch": 3.589620253164557, "grad_norm": 0.3455785906620442, "learning_rate": 8.797327110880479e-06, "loss": 0.3056, "step": 886 },
{ "epoch": 3.5936708860759494, "grad_norm": 0.3461373476211002, "learning_rate": 8.750353271996206e-06, "loss": 0.3002, "step": 887 },
{ "epoch": 3.597721518987342, "grad_norm": 0.39761643110749745, "learning_rate": 8.703470036220132e-06, "loss": 0.3578, "step": 888 },
{ "epoch": 3.601772151898734, "grad_norm": 0.3685788036110618, "learning_rate": 8.656677781143394e-06, "loss": 0.3266, "step": 889 },
{ "epoch": 3.6058227848101265, "grad_norm": 0.3282117732505965, "learning_rate": 8.609976883624377e-06, "loss": 0.2739, "step": 890 },
{ "epoch": 3.609873417721519, "grad_norm": 0.3518373996703747, "learning_rate": 8.563367719785698e-06, "loss": 0.3208, "step": 891 },
{ "epoch": 3.6139240506329116, "grad_norm": 0.3796651623613062, "learning_rate": 8.516850665011138e-06, "loss": 0.3612, "step": 892 },
{ "epoch": 3.617974683544304, "grad_norm": 0.34894299136907087, "learning_rate": 8.47042609394269e-06, "loss": 0.3089, "step": 893 },
{ "epoch": 3.622025316455696, "grad_norm": 0.34465673844835265, "learning_rate": 8.424094380477432e-06, "loss": 0.3029, "step": 894 },
{ "epoch": 3.6260759493670887, "grad_norm": 0.3469102415865181, "learning_rate": 8.37785589776465e-06, "loss": 0.3099, "step": 895 },
{ "epoch": 3.630126582278481, "grad_norm": 0.3367891855843339, "learning_rate": 8.331711018202694e-06, "loss": 0.3011, "step": 896 },
{ "epoch": 3.6341772151898732, "grad_norm": 0.364264016562065, "learning_rate": 8.285660113436104e-06, "loss": 0.3051, "step": 897 },
{ "epoch": 3.6382278481012658, "grad_norm": 0.35443956623320216, "learning_rate": 8.239703554352527e-06, "loss": 0.3169, "step": 898 },
{ "epoch": 3.6422784810126583, "grad_norm": 0.38299149940593485, "learning_rate": 8.193841711079775e-06, "loss": 0.3245, "step": 899 },
{ "epoch": 3.646329113924051, "grad_norm": 0.371103548429182, "learning_rate": 8.148074952982828e-06, "loss": 0.3051, "step": 900 },
{ "epoch": 3.6503797468354433, "grad_norm": 0.3607474318870085, "learning_rate": 8.102403648660859e-06, "loss": 0.3236, "step": 901 },
{ "epoch": 3.6544303797468354, "grad_norm": 0.34945818896197534, "learning_rate": 8.056828165944282e-06, "loss": 0.3272, "step": 902 },
{ "epoch": 3.658481012658228, "grad_norm": 0.41038566664586423, "learning_rate": 8.011348871891762e-06, "loss": 0.3383, "step": 903 },
{ "epoch": 3.6625316455696204, "grad_norm": 0.34240718675119697, "learning_rate": 7.965966132787287e-06, "loss": 0.2915, "step": 904 },
{ "epoch": 3.6665822784810125, "grad_norm": 0.35409653136469155, "learning_rate": 7.920680314137189e-06, "loss": 0.3013, "step": 905 },
{ "epoch": 3.670632911392405, "grad_norm": 0.3512799218823998, "learning_rate": 7.875491780667246e-06, "loss": 0.3242, "step": 906 },
{ "epoch": 3.6746835443037975, "grad_norm": 0.3312592092347123, "learning_rate": 7.830400896319667e-06, "loss": 0.2887, "step": 907 },
{ "epoch": 3.67873417721519, "grad_norm": 0.3954496832819805, "learning_rate": 7.785408024250259e-06, "loss": 0.3565, "step": 908 },
{ "epoch": 3.6827848101265825, "grad_norm": 0.3530958531820086, "learning_rate": 7.74051352682542e-06, "loss": 0.3225, "step": 909 },
{ "epoch": 3.6868354430379746, "grad_norm": 0.3436895964655284, "learning_rate": 7.695717765619257e-06, "loss": 0.298, "step": 910 },
{ "epoch": 3.690886075949367, "grad_norm": 0.3790292180460926, "learning_rate": 7.651021101410673e-06, "loss": 0.3261, "step": 911 },
{ "epoch": 3.6949367088607596, "grad_norm": 0.342491723876838, "learning_rate": 7.606423894180464e-06, "loss": 0.3043, "step": 912 },
{ "epoch": 3.6989873417721517, "grad_norm": 0.3830978931887183, "learning_rate": 7.56192650310839e-06, "loss": 0.3387, "step": 913 },
{ "epoch": 3.7030379746835442, "grad_norm": 0.3626569000928498, "learning_rate": 7.517529286570349e-06, "loss": 0.3145, "step": 914 },
{ "epoch": 3.7070886075949367, "grad_norm": 0.37271934417394925, "learning_rate": 7.473232602135387e-06, "loss": 0.285, "step": 915 },
{ "epoch": 3.7111392405063293, "grad_norm": 0.3740012578875894, "learning_rate": 7.429036806562935e-06, "loss": 0.3216, "step": 916 },
{ "epoch": 3.7151898734177218, "grad_norm": 0.3386650654219099, "learning_rate": 7.3849422557998455e-06, "loss": 0.3146, "step": 917 },
{ "epoch": 3.719240506329114, "grad_norm": 0.35936083847618155, "learning_rate": 7.340949304977567e-06, "loss": 0.3098, "step": 918 },
{ "epoch": 3.7232911392405064, "grad_norm": 0.3578340254813407, "learning_rate": 7.297058308409282e-06, "loss": 0.3468, "step": 919 },
{ "epoch": 3.727341772151899, "grad_norm": 0.3418403006499396, "learning_rate": 7.25326961958704e-06, "loss": 0.2897, "step": 920 },
{ "epoch": 3.731392405063291, "grad_norm": 0.39424186149549156, "learning_rate": 7.209583591178921e-06, "loss": 0.353, "step": 921 },
{ "epoch": 3.7354430379746835, "grad_norm": 0.3637692557930696, "learning_rate": 7.1660005750261925e-06, "loss": 0.2909, "step": 922 },
{ "epoch": 3.739493670886076, "grad_norm": 0.36127877073041004, "learning_rate": 7.1225209221404765e-06, "loss": 0.2857, "step": 923 },
{ "epoch": 3.7435443037974685, "grad_norm": 0.4157545299042238, "learning_rate": 7.079144982700909e-06, "loss": 0.3277, "step": 924 },
{ "epoch": 3.747594936708861, "grad_norm": 0.39605144590681857, "learning_rate": 7.0358731060513695e-06, "loss": 0.3571, "step": 925 },
{ "epoch": 3.751645569620253, "grad_norm": 0.34424741922132124, "learning_rate": 6.99270564069757e-06, "loss": 0.2908, "step": 926 },
{ "epoch": 3.7556962025316456, "grad_norm": 0.41117987079207474, "learning_rate": 6.949642934304375e-06, "loss": 0.3226, "step": 927 },
{ "epoch": 3.759746835443038, "grad_norm": 0.37085192498579583, "learning_rate": 6.906685333692871e-06, "loss": 0.2981, "step": 928 },
{ "epoch": 3.76379746835443, "grad_norm": 0.3540674754528945, "learning_rate": 6.86383318483769e-06, "loss": 0.3074, "step": 929 },
{ "epoch": 3.7678481012658227, "grad_norm": 0.3426017479203356, "learning_rate": 6.821086832864139e-06, "loss": 0.3, "step": 930 },
{ "epoch": 3.771898734177215, "grad_norm": 0.38162057930726984, "learning_rate": 6.77844662204546e-06, "loss": 0.3133, "step": 931 },
{ "epoch": 3.7759493670886077, "grad_norm": 0.3543189363499177, "learning_rate": 6.7359128958000455e-06, "loss": 0.2908, "step": 932 },
{ "epoch": 3.7800000000000002, "grad_norm": 0.40598561510851805, "learning_rate": 6.693485996688695e-06, "loss": 0.3437, "step": 933 },
{ "epoch": 3.7840506329113923, "grad_norm": 0.33616958876454023, "learning_rate": 6.651166266411801e-06, "loss": 0.2704, "step": 934 },
{ "epoch": 3.788101265822785, "grad_norm": 0.36691573032945707, "learning_rate": 6.6089540458066725e-06, "loss": 0.3066, "step": 935 },
{ "epoch": 3.7921518987341774, "grad_norm": 0.3513386163169949, "learning_rate": 6.566849674844711e-06, "loss": 0.2893, "step": 936 },
{ "epoch": 3.7962025316455694, "grad_norm": 0.3985552312103252, "learning_rate": 6.524853492628747e-06, "loss": 0.3565, "step": 937 },
{ "epoch": 3.800253164556962, "grad_norm": 0.3564688826137191, "learning_rate": 6.4829658373902536e-06, "loss": 0.3132, "step": 938 },
{ "epoch": 3.8043037974683545, "grad_norm": 0.3350339944270921, "learning_rate": 6.441187046486648e-06, "loss": 0.2896, "step": 939 },
{ "epoch": 3.808354430379747, "grad_norm": 0.3521375340572988, "learning_rate": 6.399517456398567e-06, "loss": 0.3393, "step": 940 },
{ "epoch": 3.8124050632911395, "grad_norm": 0.30918826626239604, "learning_rate": 6.357957402727164e-06, "loss": 0.2464, "step": 941 },
{ "epoch": 3.8164556962025316, "grad_norm": 0.37195050749953695, "learning_rate": 6.316507220191395e-06, "loss": 0.3427, "step": 942 },
{ "epoch": 3.820506329113924, "grad_norm": 0.34129797025188296, "learning_rate": 6.275167242625331e-06, "loss": 0.3057, "step": 943 },
{ "epoch": 3.8245569620253166, "grad_norm": 0.344990402749119, "learning_rate": 6.233937802975471e-06, "loss": 0.3204, "step": 944 },
{ "epoch": 3.8286075949367087, "grad_norm": 0.34909152674242444, "learning_rate": 6.192819233298046e-06, "loss": 0.3068, "step": 945 },
{ "epoch": 3.832658227848101, "grad_norm": 0.33182979740067925, "learning_rate": 6.151811864756383e-06, "loss": 0.3099, "step": 946 },
{ "epoch": 3.8367088607594937, "grad_norm": 0.34373207425147884, "learning_rate": 6.1109160276181655e-06, "loss": 0.3082, "step": 947 },
{ "epoch": 3.840759493670886, "grad_norm": 0.3344720089303944, "learning_rate": 6.070132051252868e-06, "loss": 0.3014, "step": 948 },
{ "epoch": 3.8448101265822787, "grad_norm": 0.34809347145891784, "learning_rate": 6.0294602641290034e-06, "loss": 0.3233, "step": 949 },
{ "epoch": 3.848860759493671, "grad_norm": 0.3319320962763181, "learning_rate": 5.988900993811575e-06, "loss": 0.315, "step": 950 },
{ "epoch": 3.8529113924050633, "grad_norm": 0.3355841209997152, "learning_rate": 5.948454566959363e-06, "loss": 0.2897, "step": 951 },
{ "epoch": 3.856962025316456, "grad_norm": 0.35980161312685716, "learning_rate": 5.908121309322328e-06, "loss": 0.3239, "step": 952 },
{ "epoch": 3.861012658227848, "grad_norm": 0.334301034379734, "learning_rate": 5.867901545738976e-06, "loss": 0.3008, "step": 953 },
{ "epoch": 3.8650632911392404, "grad_norm": 0.6453249542893541, "learning_rate": 5.827795600133774e-06, "loss": 0.343, "step": 954 },
{ "epoch": 3.869113924050633, "grad_norm": 0.34904240383566104, "learning_rate": 5.787803795514466e-06, "loss": 0.3149, "step": 955 },
{ "epoch": 3.8731645569620254, "grad_norm": 0.3729049057541835, "learning_rate": 5.747926453969576e-06, "loss": 0.3114, "step": 956 },
{ "epoch": 3.877215189873418, "grad_norm": 0.3538326016458772, "learning_rate": 5.708163896665708e-06, "loss": 0.3093, "step": 957 },
{ "epoch": 3.88126582278481, "grad_norm": 0.3417131959566994, "learning_rate": 5.668516443845047e-06, "loss": 0.3099, "step": 958 },
{ "epoch": 3.8853164556962025, "grad_norm": 0.3374388597297284, "learning_rate": 5.6289844148227225e-06, "loss": 0.2956, "step": 959 },
{ "epoch": 3.889367088607595, "grad_norm": 0.33799814114938437, "learning_rate": 5.5895681279842615e-06, "loss": 0.315, "step": 960 },
{ "epoch": 3.893417721518987, "grad_norm": 0.3507166255518697, "learning_rate": 5.550267900783019e-06, "loss": 0.3083, "step": 961 },
{ "epoch": 3.8974683544303796, "grad_norm": 0.33127588202804703, "learning_rate": 5.511084049737623e-06, "loss": 0.3345, "step": 962 },
{ "epoch": 3.901518987341772, "grad_norm": 0.3322391280102111, "learning_rate": 5.4720168904294215e-06, "loss": 0.3273, "step": 963 },
{ "epoch": 3.9055696202531647, "grad_norm": 0.31519306495645705, "learning_rate": 5.433066737499948e-06, "loss": 0.2855, "step": 964 },
{ "epoch": 3.909620253164557, "grad_norm": 0.34257308482421905, "learning_rate": 5.394233904648376e-06, "loss": 0.3295, "step": 965 },
{ "epoch": 3.9136708860759493, "grad_norm": 0.32185405342675394, "learning_rate": 5.355518704628997e-06, "loss": 0.2998, "step": 966 },
{ "epoch": 3.9177215189873418, "grad_norm": 0.33510601474334656, "learning_rate": 5.316921449248731e-06, "loss": 0.3314, "step": 967 },
{ "epoch": 3.9217721518987343, "grad_norm": 0.32477076542704353, "learning_rate": 5.278442449364538e-06, "loss": 0.3116, "step": 968 },
{ "epoch": 3.9258227848101264, "grad_norm": 0.3245634444986896, "learning_rate": 5.240082014881016e-06, "loss": 0.2976, "step": 969 },
{ "epoch": 3.929873417721519, "grad_norm": 0.3739114786902045, "learning_rate": 5.201840454747822e-06, "loss": 0.3138, "step": 970 },
{ "epoch": 3.9339240506329114, "grad_norm": 0.3601634194345993, "learning_rate": 5.163718076957223e-06, "loss": 0.3148, "step": 971 },
{ "epoch": 3.937974683544304, "grad_norm": 0.33760906751704245, "learning_rate": 5.125715188541609e-06, "loss": 0.314, "step": 972 },
{ "epoch": 3.9420253164556964, "grad_norm": 0.3508393726943037, "learning_rate": 5.087832095571021e-06, "loss": 0.3123, "step": 973 },
{ "epoch": 3.9460759493670885, "grad_norm": 0.3419178379613408, "learning_rate": 5.0500691031506766e-06, "loss": 0.2975, "step": 974 },
{ "epoch": 3.950126582278481, "grad_norm": 0.35610405428408864, "learning_rate": 5.01242651541854e-06, "loss": 0.3468, "step": 975 },
{ "epoch": 3.9541772151898735, "grad_norm": 0.3545144265467179, "learning_rate": 4.974904635542815e-06, "loss": 0.3191, "step": 976 },
{ "epoch": 3.9582278481012656, "grad_norm": 0.3200277980737422, "learning_rate": 4.937503765719582e-06, "loss": 0.3151, "step": 977 },
{ "epoch": 3.962278481012658, "grad_norm": 0.35718405266882414, "learning_rate": 4.900224207170299e-06, "loss": 0.3109, "step": 978 },
{ "epoch": 3.9663291139240506, "grad_norm": 0.35187490686973494, "learning_rate": 4.8630662601394065e-06, "loss": 0.2958, "step": 979 },
{ "epoch": 3.970379746835443, "grad_norm": 0.3394591805475553, "learning_rate": 4.8260302238918995e-06, "loss": 0.3237, "step": 980 },
{ "epoch": 3.9744303797468357, "grad_norm": 0.30419665440408017, "learning_rate": 4.789116396710924e-06, "loss": 0.2795, "step": 981 },
{ "epoch": 3.9784810126582277, "grad_norm": 0.35306253697358303, "learning_rate": 4.752325075895368e-06, "loss": 0.3257, "step": 982 },
{ "epoch": 3.9825316455696202, "grad_norm": 0.35905718018793054, "learning_rate": 4.715656557757473e-06, "loss": 0.3039, "step": 983 },
{ "epoch": 3.9865822784810128, "grad_norm": 0.34127567596716135, "learning_rate": 4.679111137620442e-06, "loss": 0.3028, "step": 984 },
{ "epoch": 3.990632911392405, "grad_norm": 0.3152574782961082, "learning_rate": 4.6426891098160585e-06, "loss": 0.3237, "step": 985 },
{ "epoch": 3.9946835443037974, "grad_norm": 0.3168410799359329, "learning_rate": 4.6063907676823474e-06, "loss": 0.2925, "step": 986 },
{ "epoch": 3.99873417721519, "grad_norm": 0.42544466368381784, "learning_rate": 4.570216403561141e-06, "loss": 0.3523, "step": 987 },
{ "epoch": 4.0030379746835445, "grad_norm": 0.5079446472917329, "learning_rate": 4.534166308795815e-06, "loss": 0.3011, "step": 988 },
{ "epoch": 4.007088607594937, "grad_norm": 0.48591735550267373, "learning_rate": 4.498240773728859e-06, "loss": 0.1981, "step": 989 },
{ "epoch": 4.0111392405063295, "grad_norm": 0.46848555995876806, "learning_rate": 4.462440087699609e-06, "loss": 0.2521, "step": 990 },
{ "epoch": 4.015189873417722, "grad_norm": 0.35405827792303946, "learning_rate": 4.426764539041861e-06, "loss": 0.2355, "step": 991 },
{ "epoch": 4.019240506329114, "grad_norm": 0.44912507152009523, "learning_rate": 4.391214415081582e-06, "loss": 0.2417, "step": 992 },
{ "epoch": 4.023291139240507, "grad_norm": 0.6557946297419023, "learning_rate": 4.355790002134579e-06, "loss": 0.2745, "step": 993 },
{ "epoch": 4.027341772151899, "grad_norm": 0.5521002909442486, "learning_rate": 4.320491585504207e-06, "loss": 0.2273, "step": 994 },
{ "epoch": 4.031392405063291, "grad_norm": 0.43552665728868706, "learning_rate": 4.2853194494790615e-06, "loss": 0.2139, "step": 995 },
{ "epoch": 4.035443037974684, "grad_norm": 0.3978534096225862, "learning_rate": 4.250273877330691e-06, "loss": 0.2426, "step": 996 },
{ "epoch": 4.039493670886076, "grad_norm": 0.4073483653891952, "learning_rate": 4.215355151311313e-06, "loss": 0.2226, "step": 997 },
{ "epoch": 4.043544303797469, "grad_norm": 0.43821482225215613, "learning_rate": 4.180563552651542e-06, "loss": 0.2232, "step": 998 },
{ "epoch": 4.047594936708861, "grad_norm": 0.4604833556158873, "learning_rate": 4.145899361558147e-06, "loss": 0.2602, "step": 999 },
{ "epoch": 4.051645569620253, "grad_norm": 0.38160818268391683, "learning_rate": 4.111362857211738e-06, "loss": 0.2339, "step": 1000 },
{ "epoch": 4.055696202531646, "grad_norm": 0.3581220866527235, "learning_rate": 4.076954317764592e-06, "loss": 0.2282, "step": 1001 },
{ "epoch": 4.059746835443038, "grad_norm": 0.3717822450574522, "learning_rate": 4.042674020338335e-06, "loss": 0.2239, "step": 1002 },
{ "epoch": 4.06379746835443, "grad_norm": 0.41611350408267544, "learning_rate": 4.0085222410217835e-06, "loss": 0.2347, "step": 1003 },
{ "epoch": 4.067848101265823, "grad_norm": 0.4082106230774672, "learning_rate": 3.974499254868674e-06, "loss": 0.2355, "step": 1004 },
{ "epoch": 4.071898734177215, "grad_norm": 0.3894466861788688, "learning_rate": 3.940605335895451e-06, "loss": 0.221, "step": 1005 },
{ "epoch": 4.075949367088608, "grad_norm": 0.36398985187983623, "learning_rate": 3.90684075707908e-06, "loss": 0.2183, "step": 1006 },
{ "epoch": 4.08, "grad_norm": 0.35973245626163153, "learning_rate": 3.8732057903548505e-06, "loss": 0.2282, "step": 1007 },
{ "epoch": 4.084050632911392, "grad_norm": 0.3985729200413975, "learning_rate": 3.8397007066141375e-06, "loss": 0.1935, "step": 1008 },
{ "epoch": 4.088101265822785, "grad_norm": 0.35544450557095547, "learning_rate": 3.806325775702304e-06, "loss": 0.2375, "step": 1009 },
{ "epoch": 4.092151898734177, "grad_norm": 0.36194096027865713, "learning_rate": 3.773081266416434e-06, "loss": 0.2585, "step": 1010 },
{ "epoch": 4.096202531645569, "grad_norm": 0.34233061205269777, "learning_rate": 3.739967446503245e-06, "loss": 0.2106, "step": 1011 },
{ "epoch": 4.100253164556962, "grad_norm": 0.3336448844295868, "learning_rate": 3.706984582656894e-06, "loss": 0.221, "step": 1012 },
{ "epoch": 4.104303797468354, "grad_norm": 0.3583042088662754, "learning_rate": 3.6741329405168237e-06, "loss": 0.2474, "step": 1013 },
{ "epoch": 4.108354430379747, "grad_norm": 0.3304132695000972, "learning_rate": 3.641412784665648e-06, "loss": 0.2135, "step": 1014 },
{ "epoch": 4.112405063291139, "grad_norm": 0.3539932511170129, "learning_rate": 3.608824378627005e-06, "loss": 0.2473, "step": 1015 },
{ "epoch": 4.116455696202531, "grad_norm": 0.3407178453096992, "learning_rate": 3.5763679848634337e-06, "loss": 0.258, "step": 1016 },
{ "epoch": 4.120506329113924, "grad_norm": 0.34336345545555275, "learning_rate": 3.544043864774269e-06, "loss": 0.2242, "step": 1017 },
{ "epoch": 4.124556962025316, "grad_norm": 0.34147928906050723, "learning_rate": 3.5118522786935282e-06, "loss": 0.2209, "step": 1018 },
{ "epoch": 4.1286075949367085, "grad_norm": 0.34204361257591404, "learning_rate": 3.479793485887819e-06, "loss": 0.2144, "step": 1019 },
{ "epoch": 4.132658227848101, "grad_norm": 0.338021718454069, "learning_rate": 3.4478677445542653e-06, "loss": 0.2299, "step": 1020 },
{ "epoch": 4.1367088607594935, "grad_norm": 0.34590779947732264, "learning_rate": 3.4160753118183767e-06, "loss": 0.2473, "step": 1021 },
{ "epoch": 4.1407594936708865, "grad_norm": 0.3331253811393305, "learning_rate": 3.3844164437320527e-06, "loss": 0.2364, "step": 1022 },
{ "epoch": 4.1448101265822785, "grad_norm": 0.3116764499253776, "learning_rate": 3.3528913952714558e-06, "loss": 0.2094, "step": 1023 },
{ "epoch": 4.148860759493671, "grad_norm": 0.3349520441350794, "learning_rate": 3.321500420335e-06, "loss": 0.242, "step": 1024 },
{ "epoch": 4.152911392405064, "grad_norm": 0.3295304247506509, "learning_rate": 3.290243771741275e-06, "loss": 0.2085, "step": 1025 },
{ "epoch": 4.156962025316456, "grad_norm": 0.3453211576036429, "learning_rate": 3.2591217012270325e-06, "loss": 0.232, "step": 1026 },
{ "epoch": 4.161012658227848, "grad_norm": 0.33145964626630053, "learning_rate": 3.228134459445149e-06, "loss": 0.2152, "step": 1027 },
{ "epoch": 4.165063291139241, "grad_norm": 0.34499963450122123, "learning_rate": 3.1972822959626205e-06, "loss": 0.2416, "step": 1028 },
{ "epoch": 4.169113924050633, "grad_norm": 0.29605977202234407, "learning_rate": 3.166565459258513e-06, "loss": 0.2059, "step": 1029 },
{ "epoch": 4.173164556962026, "grad_norm": 0.34293855994720357, "learning_rate": 3.1359841967220193e-06, "loss": 0.2487, "step": 1030 },
{ "epoch": 4.177215189873418, "grad_norm": 0.3506169498359619, "learning_rate": 3.105538754650419e-06, "loss": 0.2513, "step": 1031 },
{ "epoch": 4.18126582278481, "grad_norm": 0.3437392701106812, "learning_rate": 3.07522937824712e-06, "loss": 0.2328, "step": 1032 },
{ "epoch": 4.185316455696203, "grad_norm": 0.32018805517515914, "learning_rate": 3.0450563116196697e-06, "loss": 0.2218, "step": 1033 },
{ "epoch": 4.189367088607595, "grad_norm": 0.3181966567845084, "learning_rate": 3.0150197977778008e-06, "loss": 0.2267, "step": 1034 },
{ "epoch": 4.193417721518987, "grad_norm": 0.31632380308508223, "learning_rate": 2.985120078631465e-06, "loss": 0.2436, "step": 1035 },
{ "epoch": 4.19746835443038, "grad_norm": 0.3095439421345031, "learning_rate": 2.9553573949888893e-06, "loss": 0.2104, "step": 1036 },
{ "epoch": 4.201518987341772, "grad_norm": 0.3404672613303262, "learning_rate": 2.9257319865546384e-06, "loss": 0.2371, "step": 1037 },
{ "epoch": 4.205569620253165, "grad_norm": 0.3279491657605266, "learning_rate": 2.896244091927678e-06, "loss": 0.2355, "step": 1038 },
{ "epoch": 4.209620253164557, "grad_norm": 0.34138367726433144, "learning_rate": 2.8668939485994584e-06, "loss": 0.2512, "step": 1039 },
{ "epoch": 4.213670886075949, "grad_norm": 0.3190784136598891, "learning_rate": 2.837681792951994e-06, "loss": 0.2256, "step": 1040 },
{ "epoch": 4.217721518987342, "grad_norm": 0.32109930526320646, "learning_rate": 2.808607860255981e-06, "loss": 0.2315, "step": 1041 },
{ "epoch": 4.221772151898734, "grad_norm": 0.31766548396877464, "learning_rate": 2.7796723846688634e-06, "loss": 0.2437, "step": 1042 },
{ "epoch": 4.225822784810126, "grad_norm": 0.3170148705923161, "learning_rate": 2.7508755992329937e-06, "loss": 0.2384, "step": 1043 },
{ "epoch": 4.229873417721519, "grad_norm": 0.3249249091189558, "learning_rate": 2.722217735873718e-06, "loss": 0.2317, "step": 1044 },
{ "epoch": 4.233924050632911, "grad_norm": 0.3466453032359142, "learning_rate": 2.6936990253975315e-06, "loss": 0.2355, "step": 1045 },
{ "epoch": 4.237974683544304, "grad_norm": 0.3034524136870784, "learning_rate": 2.665319697490205e-06, "loss": 0.2163, "step": 1046 },
{ "epoch": 4.242025316455696, "grad_norm": 0.33775626231413175, "learning_rate": 2.637079980714945e-06, "loss": 0.2387, "step": 1047 },
{ "epoch": 4.246075949367088, "grad_norm": 0.3155863134689609, "learning_rate": 2.6089801025105453e-06, "loss": 0.2231, "step": 1048 },
{ "epoch": 4.250126582278481, "grad_norm": 0.3354759123251955, "learning_rate": 2.581020289189571e-06, "loss": 0.2491, "step": 1049 },
{ "epoch": 4.254177215189873, "grad_norm": 0.32495953517164294, "learning_rate": 2.553200765936501e-06, "loss": 0.217, "step": 1050 },
{ "epoch": 4.258227848101265, "grad_norm": 0.3193506754937909, "learning_rate": 2.525521756805962e-06, "loss": 0.2523, "step": 1051 },
{ "epoch": 4.262278481012658, "grad_norm": 0.3176717626652151, "learning_rate": 2.497983484720885e-06, "loss": 0.2354, "step": 1052 },
{ "epoch": 4.2663291139240505, "grad_norm": 0.298249410672444, "learning_rate": 2.470586171470728e-06, "loss": 0.2026, "step": 1053 },
{ "epoch": 4.270379746835443, "grad_norm": 0.34515905313024625, "learning_rate": 2.4433300377096836e-06, "loss": 0.239, "step": 1054 },
{ "epoch": 4.2744303797468355, "grad_norm": 0.32867697398738444, "learning_rate": 2.4162153029549073e-06, "loss": 0.2312, "step": 1055 },
{ "epoch": 4.2784810126582276, "grad_norm": 0.29212763757381394, "learning_rate": 2.3892421855847458e-06, "loss": 0.1912, "step": 1056 },
{ "epoch": 4.2825316455696205, "grad_norm": 0.3245547619698615, "learning_rate": 2.362410902836978e-06, "loss": 0.2504, "step": 1057 },
{ "epoch": 4.286582278481013, "grad_norm": 0.31248559982527035, "learning_rate": 2.3357216708070653e-06, "loss": 0.2337, "step": 1058 },
{ "epoch": 4.290632911392405, "grad_norm": 0.34262130840148647, "learning_rate": 2.309174704446411e-06, "loss": 0.2537, "step": 1059 },
{ "epoch": 4.294683544303798, "grad_norm": 0.3431128508840366, "learning_rate": 2.2827702175606437e-06, "loss": 0.2286, "step": 1060 },
{ "epoch": 4.29873417721519, "grad_norm": 0.32257689761664654, "learning_rate": 2.256508422807855e-06, "loss": 0.2467, "step": 1061 },
{ "epoch": 4.302784810126583, "grad_norm": 0.32486990653464815, "learning_rate": 2.230389531696946e-06, "loss": 0.2372, "step": 1062 },
{ "epoch": 4.306835443037975, "grad_norm": 0.3083400737028036, "learning_rate": 2.204413754585857e-06, "loss": 0.2085, "step": 1063 },
{ "epoch": 4.310886075949367, "grad_norm": 0.332057007592239, "learning_rate": 2.1785813006799406e-06, "loss": 0.2355, "step": 1064 },
{ "epoch": 4.31493670886076, "grad_norm": 0.31885629489654344, "learning_rate": 2.1528923780302224e-06, "loss": 0.2209, "step": 1065 },
{ "epoch": 4.318987341772152, "grad_norm": 0.32253470917366195, "learning_rate": 2.127347193531757e-06, "loss": 0.2275, "step": 1066 },
{ "epoch": 4.323037974683544, "grad_norm": 0.31519492587792414, "learning_rate": 2.101945952921942e-06, "loss": 0.2212, "step": 1067 },
{ "epoch": 4.327088607594937, "grad_norm": 0.3262900142494503, "learning_rate": 2.0766888607788906e-06, "loss": 0.229, "step": 1068 },
{ "epoch": 4.331139240506329,
|
"grad_norm": 0.31742288467555463, |
|
"learning_rate": 2.0515761205197337e-06, |
|
"loss": 0.2267, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 4.335189873417722, |
|
"grad_norm": 0.31614735342274686, |
|
"learning_rate": 2.0266079343990453e-06, |
|
"loss": 0.2459, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 4.339240506329114, |
|
"grad_norm": 0.32365147265856875, |
|
"learning_rate": 2.0017845035071494e-06, |
|
"loss": 0.2397, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 4.343291139240506, |
|
"grad_norm": 0.3127732912592606, |
|
"learning_rate": 1.9771060277685537e-06, |
|
"loss": 0.218, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 4.347341772151899, |
|
"grad_norm": 0.3342590461128827, |
|
"learning_rate": 1.95257270594031e-06, |
|
"loss": 0.2464, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 4.351392405063291, |
|
"grad_norm": 0.3071935707035216, |
|
"learning_rate": 1.9281847356104188e-06, |
|
"loss": 0.2184, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 4.355443037974683, |
|
"grad_norm": 0.303000305625921, |
|
"learning_rate": 1.9039423131962365e-06, |
|
"loss": 0.2201, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 4.359493670886076, |
|
"grad_norm": 0.29775756614384974, |
|
"learning_rate": 1.8798456339429027e-06, |
|
"loss": 0.2193, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 4.363544303797468, |
|
"grad_norm": 0.30820239396781407, |
|
"learning_rate": 1.8558948919217612e-06, |
|
"loss": 0.2304, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 4.367594936708861, |
|
"grad_norm": 0.3020638237193348, |
|
"learning_rate": 1.8320902800287954e-06, |
|
"loss": 0.2041, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 4.371645569620253, |
|
"grad_norm": 0.3253902771506906, |
|
"learning_rate": 1.8084319899830726e-06, |
|
"loss": 0.256, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 4.375696202531645, |
|
"grad_norm": 0.31042696749026594, |
|
"learning_rate": 1.7849202123252097e-06, |
|
"loss": 0.2503, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 4.379746835443038, |
|
"grad_norm": 0.3116225250053105, |
|
"learning_rate": 1.7615551364158401e-06, |
|
"loss": 0.2155, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 4.38379746835443, |
|
"grad_norm": 0.30655624662225417, |
|
"learning_rate": 1.738336950434061e-06, |
|
"loss": 0.2107, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 4.387848101265822, |
|
"grad_norm": 0.32341589469313037, |
|
"learning_rate": 1.715265841375957e-06, |
|
"loss": 0.2234, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 4.391898734177215, |
|
"grad_norm": 0.3302027233873041, |
|
"learning_rate": 1.6923419950530684e-06, |
|
"loss": 0.2288, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 4.395949367088607, |
|
"grad_norm": 0.29849157538121274, |
|
"learning_rate": 1.6695655960909008e-06, |
|
"loss": 0.2256, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"grad_norm": 0.34546771697443024, |
|
"learning_rate": 1.646936827927441e-06, |
|
"loss": 0.2592, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 4.404050632911392, |
|
"grad_norm": 0.3088003825597379, |
|
"learning_rate": 1.6244558728116766e-06, |
|
"loss": 0.2177, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 4.4081012658227845, |
|
"grad_norm": 0.3144112473003213, |
|
"learning_rate": 1.6021229118021265e-06, |
|
"loss": 0.2357, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 4.4121518987341775, |
|
"grad_norm": 0.3011080191029588, |
|
"learning_rate": 1.5799381247653967e-06, |
|
"loss": 0.2152, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 4.4162025316455695, |
|
"grad_norm": 0.3364853148360307, |
|
"learning_rate": 1.5579016903747013e-06, |
|
"loss": 0.2404, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 4.4202531645569625, |
|
"grad_norm": 0.3074940188453271, |
|
"learning_rate": 1.5360137861084656e-06, |
|
"loss": 0.2211, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 4.424303797468355, |
|
"grad_norm": 0.31616263616502766, |
|
"learning_rate": 1.5142745882488475e-06, |
|
"loss": 0.2246, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 4.428354430379747, |
|
"grad_norm": 0.3067886307700066, |
|
"learning_rate": 1.4926842718803691e-06, |
|
"loss": 0.2442, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 4.43240506329114, |
|
"grad_norm": 0.2953526484387316, |
|
"learning_rate": 1.4712430108884657e-06, |
|
"loss": 0.2035, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 4.436455696202532, |
|
"grad_norm": 0.3159878798450858, |
|
"learning_rate": 1.4499509779581078e-06, |
|
"loss": 0.2325, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 4.440506329113924, |
|
"grad_norm": 0.3301863152340416, |
|
"learning_rate": 1.4288083445723988e-06, |
|
"loss": 0.2516, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 4.444556962025317, |
|
"grad_norm": 0.31076213447323425, |
|
"learning_rate": 1.4078152810112045e-06, |
|
"loss": 0.2071, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 4.448607594936709, |
|
"grad_norm": 0.3095805873210312, |
|
"learning_rate": 1.3869719563497697e-06, |
|
"loss": 0.2101, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 4.452658227848101, |
|
"grad_norm": 0.3380813878403558, |
|
"learning_rate": 1.3662785384573663e-06, |
|
"loss": 0.2594, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 4.456708860759494, |
|
"grad_norm": 0.31001554222577443, |
|
"learning_rate": 1.3457351939959383e-06, |
|
"loss": 0.2141, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 4.460759493670886, |
|
"grad_norm": 0.31667484129312723, |
|
"learning_rate": 1.3253420884187551e-06, |
|
"loss": 0.2407, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 4.464810126582279, |
|
"grad_norm": 0.3430885546343663, |
|
"learning_rate": 1.3050993859690953e-06, |
|
"loss": 0.2285, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 4.468860759493671, |
|
"grad_norm": 0.3044416177295222, |
|
"learning_rate": 1.2850072496788869e-06, |
|
"loss": 0.2117, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 4.472911392405063, |
|
"grad_norm": 0.3180842490310364, |
|
"learning_rate": 1.2650658413674434e-06, |
|
"loss": 0.2393, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 4.476962025316456, |
|
"grad_norm": 0.32651385754680534, |
|
"learning_rate": 1.2452753216401226e-06, |
|
"loss": 0.2494, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 4.481012658227848, |
|
"grad_norm": 0.2913253122012983, |
|
"learning_rate": 1.2256358498870503e-06, |
|
"loss": 0.2078, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 4.485063291139241, |
|
"grad_norm": 0.3354544253680717, |
|
"learning_rate": 1.2061475842818337e-06, |
|
"loss": 0.2495, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 4.489113924050633, |
|
"grad_norm": 0.3169338876557915, |
|
"learning_rate": 1.1868106817802816e-06, |
|
"loss": 0.2375, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 4.493164556962025, |
|
"grad_norm": 0.30273373932017783, |
|
"learning_rate": 1.1676252981191482e-06, |
|
"loss": 0.2148, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 4.497215189873418, |
|
"grad_norm": 0.3218480414890918, |
|
"learning_rate": 1.1485915878148823e-06, |
|
"loss": 0.2325, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 4.50126582278481, |
|
"grad_norm": 0.334803791916304, |
|
"learning_rate": 1.1297097041623584e-06, |
|
"loss": 0.2464, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 4.505316455696202, |
|
"grad_norm": 0.3122386818531078, |
|
"learning_rate": 1.1109797992336847e-06, |
|
"loss": 0.236, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 4.509367088607595, |
|
"grad_norm": 0.30807736087196874, |
|
"learning_rate": 1.092402023876933e-06, |
|
"loss": 0.2258, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 4.513417721518987, |
|
"grad_norm": 0.31076768866106047, |
|
"learning_rate": 1.0739765277149527e-06, |
|
"loss": 0.2257, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 4.517468354430379, |
|
"grad_norm": 0.32802762576815847, |
|
"learning_rate": 1.0557034591441596e-06, |
|
"loss": 0.2618, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 4.521518987341772, |
|
"grad_norm": 0.32579635807344726, |
|
"learning_rate": 1.0375829653333324e-06, |
|
"loss": 0.2248, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 4.525569620253164, |
|
"grad_norm": 0.3220599660092697, |
|
"learning_rate": 1.0196151922224385e-06, |
|
"loss": 0.2363, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 4.529620253164557, |
|
"grad_norm": 0.29942997295553764, |
|
"learning_rate": 1.0018002845214526e-06, |
|
"loss": 0.2228, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 4.533670886075949, |
|
"grad_norm": 0.32408645964663035, |
|
"learning_rate": 9.841383857091947e-07, |
|
"loss": 0.2275, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 4.537721518987341, |
|
"grad_norm": 0.3186218458585043, |
|
"learning_rate": 9.666296380321616e-07, |
|
"loss": 0.2283, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 4.541772151898734, |
|
"grad_norm": 0.312784360717046, |
|
"learning_rate": 9.492741825034124e-07, |
|
"loss": 0.2271, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 4.5458227848101265, |
|
"grad_norm": 0.32069465252098084, |
|
"learning_rate": 9.320721589013892e-07, |
|
"loss": 0.2382, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 4.549873417721519, |
|
"grad_norm": 0.29652914526583973, |
|
"learning_rate": 9.150237057688339e-07, |
|
"loss": 0.2181, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 4.5539240506329115, |
|
"grad_norm": 0.3295795346735293, |
|
"learning_rate": 8.981289604116328e-07, |
|
"loss": 0.2478, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 4.557974683544304, |
|
"grad_norm": 0.3132448559406914, |
|
"learning_rate": 8.813880588977542e-07, |
|
"loss": 0.2397, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 4.5620253164556965, |
|
"grad_norm": 0.3091039941062537, |
|
"learning_rate": 8.648011360561126e-07, |
|
"loss": 0.2397, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 4.566075949367089, |
|
"grad_norm": 0.29111887021385624, |
|
"learning_rate": 8.483683254755037e-07, |
|
"loss": 0.2171, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 4.570126582278481, |
|
"grad_norm": 0.2927043510013831, |
|
"learning_rate": 8.320897595035227e-07, |
|
"loss": 0.2055, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 4.574177215189874, |
|
"grad_norm": 0.33886222165268953, |
|
"learning_rate": 8.159655692455093e-07, |
|
"loss": 0.2682, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 4.578227848101266, |
|
"grad_norm": 0.30446442339679847, |
|
"learning_rate": 7.999958845634648e-07, |
|
"loss": 0.2086, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 4.582278481012658, |
|
"grad_norm": 0.31620373980696137, |
|
"learning_rate": 7.841808340750478e-07, |
|
"loss": 0.2559, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 4.586329113924051, |
|
"grad_norm": 0.3078258416863651, |
|
"learning_rate": 7.685205451524869e-07, |
|
"loss": 0.2315, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 4.590379746835443, |
|
"grad_norm": 0.3365108458402454, |
|
"learning_rate": 7.530151439216027e-07, |
|
"loss": 0.2279, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 4.594430379746836, |
|
"grad_norm": 0.31444736424234104, |
|
"learning_rate": 7.376647552607675e-07, |
|
"loss": 0.203, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 4.598481012658228, |
|
"grad_norm": 0.3266085290876586, |
|
"learning_rate": 7.224695027998963e-07, |
|
"loss": 0.2643, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 4.60253164556962, |
|
"grad_norm": 0.3214273051515812, |
|
"learning_rate": 7.07429508919466e-07, |
|
"loss": 0.2186, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 4.606582278481013, |
|
"grad_norm": 0.31347816330548894, |
|
"learning_rate": 6.925448947495206e-07, |
|
"loss": 0.2146, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 4.610632911392405, |
|
"grad_norm": 0.3075523933923044, |
|
"learning_rate": 6.778157801686936e-07, |
|
"loss": 0.2265, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 4.614683544303798, |
|
"grad_norm": 0.3183296319627729, |
|
"learning_rate": 6.632422838032515e-07, |
|
"loss": 0.2541, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 4.61873417721519, |
|
"grad_norm": 0.30938684380910697, |
|
"learning_rate": 6.488245230261281e-07, |
|
"loss": 0.2259, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 4.622784810126582, |
|
"grad_norm": 0.3830715607402039, |
|
"learning_rate": 6.345626139559868e-07, |
|
"loss": 0.2401, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 4.626835443037975, |
|
"grad_norm": 0.3247177356364176, |
|
"learning_rate": 6.204566714562866e-07, |
|
"loss": 0.2413, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 4.630886075949367, |
|
"grad_norm": 0.3065317992703947, |
|
"learning_rate": 6.06506809134344e-07, |
|
"loss": 0.2158, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 4.634936708860759, |
|
"grad_norm": 0.3104542093882322, |
|
"learning_rate": 5.927131393404373e-07, |
|
"loss": 0.253, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 4.638987341772152, |
|
"grad_norm": 0.307200753547952, |
|
"learning_rate": 5.790757731668817e-07, |
|
"loss": 0.2296, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 4.643037974683544, |
|
"grad_norm": 0.33312387614770717, |
|
"learning_rate": 5.655948204471507e-07, |
|
"loss": 0.2206, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 4.647088607594936, |
|
"grad_norm": 0.31648243754201516, |
|
"learning_rate": 5.522703897549875e-07, |
|
"loss": 0.2392, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 4.651139240506329, |
|
"grad_norm": 0.30520454259274693, |
|
"learning_rate": 5.391025884035239e-07, |
|
"loss": 0.2354, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 4.655189873417721, |
|
"grad_norm": 0.2939122317350534, |
|
"learning_rate": 5.260915224444207e-07, |
|
"loss": 0.2037, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 4.659240506329114, |
|
"grad_norm": 0.33931610614614355, |
|
"learning_rate": 5.132372966670129e-07, |
|
"loss": 0.2663, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 4.663291139240506, |
|
"grad_norm": 0.32928578137312636, |
|
"learning_rate": 5.005400145974704e-07, |
|
"loss": 0.228, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 4.667341772151898, |
|
"grad_norm": 0.3034247566710897, |
|
"learning_rate": 4.879997784979562e-07, |
|
"loss": 0.2222, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 4.671392405063291, |
|
"grad_norm": 0.3088414350440436, |
|
"learning_rate": 4.7561668936580984e-07, |
|
"loss": 0.2461, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 4.675443037974683, |
|
"grad_norm": 0.3151104636532443, |
|
"learning_rate": 4.6339084693272306e-07, |
|
"loss": 0.2316, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 4.679493670886076, |
|
"grad_norm": 0.31440747155009957, |
|
"learning_rate": 4.5132234966395847e-07, |
|
"loss": 0.2459, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 4.6835443037974684, |
|
"grad_norm": 0.3082633876565363, |
|
"learning_rate": 4.3941129475752795e-07, |
|
"loss": 0.2093, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 4.6875949367088605, |
|
"grad_norm": 0.320926650183746, |
|
"learning_rate": 4.27657778143431e-07, |
|
"loss": 0.2379, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 4.6916455696202535, |
|
"grad_norm": 0.30907391317376687, |
|
"learning_rate": 4.1606189448287757e-07, |
|
"loss": 0.2168, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 4.6956962025316455, |
|
"grad_norm": 0.3205278197568248, |
|
"learning_rate": 4.046237371675177e-07, |
|
"loss": 0.26, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 4.699746835443038, |
|
"grad_norm": 0.30711817378349, |
|
"learning_rate": 3.9334339831869963e-07, |
|
"loss": 0.2398, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 4.703797468354431, |
|
"grad_norm": 0.3145593412047012, |
|
"learning_rate": 3.8222096878671955e-07, |
|
"loss": 0.2275, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 4.707848101265823, |
|
"grad_norm": 0.33214044996459674, |
|
"learning_rate": 3.7125653815009545e-07, |
|
"loss": 0.2337, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 4.711898734177215, |
|
"grad_norm": 0.30813754159720574, |
|
"learning_rate": 3.6045019471484974e-07, |
|
"loss": 0.2447, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 4.715949367088608, |
|
"grad_norm": 0.29606314360788527, |
|
"learning_rate": 3.498020255137813e-07, |
|
"loss": 0.2301, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"grad_norm": 0.30864347993821173, |
|
"learning_rate": 3.393121163057811e-07, |
|
"loss": 0.2521, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 4.724050632911393, |
|
"grad_norm": 0.3187845215983957, |
|
"learning_rate": 3.289805515751399e-07, |
|
"loss": 0.2346, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 4.728101265822785, |
|
"grad_norm": 0.3045077623974581, |
|
"learning_rate": 3.188074145308573e-07, |
|
"loss": 0.2424, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 4.732151898734177, |
|
"grad_norm": 0.29352436946662647, |
|
"learning_rate": 3.087927871059804e-07, |
|
"loss": 0.2347, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 4.73620253164557, |
|
"grad_norm": 0.3213587794740573, |
|
"learning_rate": 2.989367499569418e-07, |
|
"loss": 0.2366, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 4.740253164556962, |
|
"grad_norm": 0.3047735871453943, |
|
"learning_rate": 2.8923938246290917e-07, |
|
"loss": 0.2328, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 4.744303797468355, |
|
"grad_norm": 0.29569939270750684, |
|
"learning_rate": 2.7970076272514804e-07, |
|
"loss": 0.2252, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 4.748354430379747, |
|
"grad_norm": 0.31715841469755546, |
|
"learning_rate": 2.703209675663887e-07, |
|
"loss": 0.2488, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 4.752405063291139, |
|
"grad_norm": 0.30945943467494474, |
|
"learning_rate": 2.6110007253021374e-07, |
|
"loss": 0.2176, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 4.756455696202532, |
|
"grad_norm": 0.32455906763710307, |
|
"learning_rate": 2.520381518804471e-07, |
|
"loss": 0.2374, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 4.760506329113924, |
|
"grad_norm": 0.3252555515825012, |
|
"learning_rate": 2.4313527860054585e-07, |
|
"loss": 0.2487, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 4.764556962025316, |
|
"grad_norm": 0.31492645788936324, |
|
"learning_rate": 2.343915243930317e-07, |
|
"loss": 0.2205, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 4.768607594936709, |
|
"grad_norm": 0.34083088066551387, |
|
"learning_rate": 2.2580695967889367e-07, |
|
"loss": 0.2344, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 4.772658227848101, |
|
"grad_norm": 0.31313761717784216, |
|
"learning_rate": 2.1738165359704189e-07, |
|
"loss": 0.2421, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 4.776708860759493, |
|
"grad_norm": 0.2952093332637497, |
|
"learning_rate": 2.0911567400373257e-07, |
|
"loss": 0.2005, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 4.780759493670886, |
|
"grad_norm": 0.3193947001836654, |
|
"learning_rate": 2.0100908747202607e-07, |
|
"loss": 0.2211, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 4.784810126582278, |
|
"grad_norm": 0.3345875925677834, |
|
"learning_rate": 1.9306195929125638e-07, |
|
"loss": 0.2341, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 4.788860759493671, |
|
"grad_norm": 0.3258772697092736, |
|
"learning_rate": 1.8527435346650247e-07, |
|
"loss": 0.2168, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 4.792911392405063, |
|
"grad_norm": 0.30216475862425457, |
|
"learning_rate": 1.7764633271807108e-07, |
|
"loss": 0.2085, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 4.796962025316455, |
|
"grad_norm": 0.3110905799931299, |
|
"learning_rate": 1.7017795848099262e-07, |
|
"loss": 0.2558, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 4.801012658227848, |
|
"grad_norm": 0.3184004923259186, |
|
"learning_rate": 1.6286929090452596e-07, |
|
"loss": 0.2349, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 4.80506329113924, |
|
"grad_norm": 0.308148303842863, |
|
"learning_rate": 1.557203888516745e-07, |
|
"loss": 0.2124, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 4.809113924050633, |
|
"grad_norm": 0.3097948563084637, |
|
"learning_rate": 1.487313098987131e-07, |
|
"loss": 0.2262, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 4.813164556962025, |
|
"grad_norm": 0.32670313641358384, |
|
"learning_rate": 1.4190211033472402e-07, |
|
"loss": 0.2505, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 4.8172151898734175, |
|
"grad_norm": 0.30104393067622326, |
|
"learning_rate": 1.3523284516113955e-07, |
|
"loss": 0.252, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 4.82126582278481, |
|
"grad_norm": 0.3019504910052267, |
|
"learning_rate": 1.2872356809130682e-07, |
|
"loss": 0.2109, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 4.8253164556962025, |
|
"grad_norm": 0.32065757038133985, |
|
"learning_rate": 1.2237433155004807e-07, |
|
"loss": 0.2494, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 4.829367088607595, |
|
"grad_norm": 0.30102668075288636, |
|
"learning_rate": 1.1618518667323886e-07, |
|
"loss": 0.2173, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 4.8334177215189875, |
|
"grad_norm": 0.3123157177572655, |
|
"learning_rate": 1.1015618330740385e-07, |
|
"loss": 0.2279, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 4.83746835443038, |
|
"grad_norm": 0.3157469625663789, |
|
"learning_rate": 1.042873700093061e-07, |
|
"loss": 0.2306, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 4.841518987341772, |
|
"grad_norm": 0.33132122688129206, |
|
"learning_rate": 9.857879404556291e-08, |
|
"loss": 0.2537, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 4.845569620253165, |
|
"grad_norm": 0.2934831489349099, |
|
"learning_rate": 9.303050139225722e-08, |
|
"loss": 0.2128, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 4.849620253164557, |
|
"grad_norm": 0.31719061567959217, |
|
"learning_rate": 8.76425367345779e-08, |
|
"loss": 0.2337, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 4.85367088607595, |
|
"grad_norm": 0.30046457283962047, |
|
"learning_rate": 8.241494346644897e-08, |
|
"loss": 0.2196, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 4.857721518987342, |
|
"grad_norm": 0.3372224091097723, |
|
"learning_rate": 7.734776369019204e-08, |
|
"loss": 0.2546, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 4.861772151898734, |
|
"grad_norm": 0.3056808105667589, |
|
"learning_rate": 7.244103821617332e-08, |
|
"loss": 0.1988, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 4.865822784810127, |
|
"grad_norm": 0.3248271832754149, |
|
"learning_rate": 6.769480656248606e-08, |
|
"loss": 0.2449, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 4.869873417721519, |
|
"grad_norm": 0.2989891384785881, |
|
"learning_rate": 6.310910695462635e-08, |
|
"loss": 0.2257, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 4.873924050632912, |
|
"grad_norm": 0.2971827837710027, |
|
"learning_rate": 5.8683976325191185e-08, |
|
"loss": 0.2128, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 4.877974683544304, |
|
"grad_norm": 0.3256911743399988, |
|
"learning_rate": 5.4419450313571984e-08, |
|
"loss": 0.2522, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 4.882025316455696, |
|
"grad_norm": 0.31335315529233415, |
|
"learning_rate": 5.031556326567488e-08, |
|
"loss": 0.2368, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 4.886075949367089, |
|
"grad_norm": 0.3050969317332606, |
|
"learning_rate": 4.637234823364312e-08, |
|
"loss": 0.2152, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 4.890126582278481, |
|
"grad_norm": 0.30508863582784757, |
|
"learning_rate": 4.258983697558838e-08, |
|
"loss": 0.2119, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 4.894177215189873, |
|
"grad_norm": 0.32683304168872856, |
|
"learning_rate": 3.896805995533548e-08, |
|
"loss": 0.2529, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 4.898227848101266, |
|
"grad_norm": 0.2984320695984988, |
|
"learning_rate": 3.550704634218028e-08, |
|
"loss": 0.2218, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 4.902278481012658, |
|
"grad_norm": 0.31198257852375655, |
|
"learning_rate": 3.2206824010647676e-08, |
|
"loss": 0.2382, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 4.90632911392405, |
|
"grad_norm": 0.31772663730430417, |
|
"learning_rate": 2.9067419540278476e-08, |
|
"loss": 0.2339, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 4.910379746835443, |
|
"grad_norm": 0.3136481097421503, |
|
"learning_rate": 2.6088858215400638e-08, |
|
"loss": 0.2217, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 4.914430379746835, |
|
"grad_norm": 0.3237310930280893, |
|
"learning_rate": 2.3271164024940564e-08, |
|
"loss": 0.244, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 4.918481012658228, |
|
"grad_norm": 0.30830687143814045, |
|
"learning_rate": 2.061435966221881e-08, |
|
"loss": 0.2201, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 4.92253164556962, |
|
"grad_norm": 0.28859669981602387, |
|
"learning_rate": 1.811846652477245e-08, |
|
"loss": 0.2214, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 4.926582278481012, |
|
"grad_norm": 0.3125673914997827, |
|
"learning_rate": 1.5783504714184106e-08, |
|
"loss": 0.2321, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 4.930632911392405, |
|
"grad_norm": 0.30212304122944694, |
|
"learning_rate": 1.360949303591097e-08, |
|
"loss": 0.2204, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 4.934683544303797, |
|
"grad_norm": 0.3007868437834777, |
|
"learning_rate": 1.1596448999144916e-08, |
|
"loss": 0.2229, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 4.93873417721519, |
|
"grad_norm": 0.3251508315171844, |
|
"learning_rate": 9.744388816668172e-09, |
|
"loss": 0.2402, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 4.942784810126582, |
|
"grad_norm": 0.3257649242557537, |
|
"learning_rate": 8.05332740472009e-09, |
|
"loss": 0.2485, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 4.946835443037974, |
|
"grad_norm": 0.29797245578476583, |
|
"learning_rate": 6.523278382872811e-09, |
|
"loss": 0.2085, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 4.950886075949367, |
|
"grad_norm": 0.3064023037990366, |
|
"learning_rate": 5.15425407393133e-09, |
|
"loss": 0.2279, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 4.954936708860759, |
|
"grad_norm": 0.3177673300477984, |
|
"learning_rate": 3.94626550383137e-09, |
|
"loss": 0.2397, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 4.958987341772152, |
|
"grad_norm": 0.3194018552377612, |
|
"learning_rate": 2.899322401546112e-09, |
|
"loss": 0.2108, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 4.9630379746835445, |
|
"grad_norm": 0.33571415442118485, |
|
"learning_rate": 2.013433199010706e-09, |
|
"loss": 0.2306, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 4.9670886075949365, |
|
"grad_norm": 0.31294789991593425, |
|
"learning_rate": 1.2886050310556563e-09, |
|
"loss": 0.2195, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 4.9711392405063295, |
|
"grad_norm": 0.3054999014245937, |
|
"learning_rate": 7.248437353468695e-10, |
|
"loss": 0.2315, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 4.975189873417722, |
|
"grad_norm": 0.3380929794220337, |
|
"learning_rate": 3.221538523412449e-10, |
|
"loss": 0.2301, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 4.979240506329114, |
|
"grad_norm": 0.3321144544759669, |
|
"learning_rate": 8.053862524670663e-11, |
|
"loss": 0.241, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 4.983291139240507, |
|
"grad_norm": 0.2915411264613831, |
|
"learning_rate": 0.0, |
|
"loss": 0.2115, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 4.983291139240507, |
|
"step": 1230, |
|
"total_flos": 2.3904400115949896e+18, |
|
"train_loss": 0.4335456542489005, |
|
"train_runtime": 120917.786, |
|
"train_samples_per_second": 1.307, |
|
"train_steps_per_second": 0.01 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 1230, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 5, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2.3904400115949896e+18, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|