|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.993610223642173,
  "eval_steps": 500,
  "global_step": 390,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
|
{ |
|
"epoch": 0.012779552715654952, |
|
"grad_norm": 6.606391299183944, |
|
"learning_rate": 1.0256410256410257e-06, |
|
"loss": 1.0439, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.025559105431309903, |
|
"grad_norm": 6.580557712601049, |
|
"learning_rate": 2.0512820512820513e-06, |
|
"loss": 1.0507, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.038338658146964855, |
|
"grad_norm": 6.509005644872208, |
|
"learning_rate": 3.0769230769230774e-06, |
|
"loss": 1.0409, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.051118210862619806, |
|
"grad_norm": 5.947065826738325, |
|
"learning_rate": 4.102564102564103e-06, |
|
"loss": 1.0366, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.06389776357827476, |
|
"grad_norm": 4.4097960543741825, |
|
"learning_rate": 5.128205128205128e-06, |
|
"loss": 0.9889, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.07667731629392971, |
|
"grad_norm": 2.818415172846207, |
|
"learning_rate": 6.153846153846155e-06, |
|
"loss": 0.9628, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.08945686900958466, |
|
"grad_norm": 2.6496653931802, |
|
"learning_rate": 7.17948717948718e-06, |
|
"loss": 0.9587, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.10223642172523961, |
|
"grad_norm": 4.336870837964605, |
|
"learning_rate": 8.205128205128205e-06, |
|
"loss": 0.9602, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.11501597444089456, |
|
"grad_norm": 4.298942553498102, |
|
"learning_rate": 9.230769230769232e-06, |
|
"loss": 0.9378, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.12779552715654952, |
|
"grad_norm": 4.186284730314603, |
|
"learning_rate": 1.0256410256410256e-05, |
|
"loss": 0.912, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.14057507987220447, |
|
"grad_norm": 3.8257868758679257, |
|
"learning_rate": 1.1282051282051283e-05, |
|
"loss": 0.8958, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.15335463258785942, |
|
"grad_norm": 2.5816728496761097, |
|
"learning_rate": 1.230769230769231e-05, |
|
"loss": 0.8622, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.16613418530351437, |
|
"grad_norm": 1.8503542682907892, |
|
"learning_rate": 1.3333333333333333e-05, |
|
"loss": 0.8189, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.17891373801916932, |
|
"grad_norm": 1.9534484580657967, |
|
"learning_rate": 1.435897435897436e-05, |
|
"loss": 0.8372, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.19169329073482427, |
|
"grad_norm": 1.516048850182833, |
|
"learning_rate": 1.5384615384615387e-05, |
|
"loss": 0.8026, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.20447284345047922, |
|
"grad_norm": 1.0874051240964258, |
|
"learning_rate": 1.641025641025641e-05, |
|
"loss": 0.7636, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.21725239616613418, |
|
"grad_norm": 1.1631866901137267, |
|
"learning_rate": 1.7435897435897438e-05, |
|
"loss": 0.7816, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.23003194888178913, |
|
"grad_norm": 1.2171907306151695, |
|
"learning_rate": 1.8461538461538465e-05, |
|
"loss": 0.7546, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.24281150159744408, |
|
"grad_norm": 0.9490832264240321, |
|
"learning_rate": 1.9487179487179488e-05, |
|
"loss": 0.7234, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.25559105431309903, |
|
"grad_norm": 0.9462270136925807, |
|
"learning_rate": 2.0512820512820512e-05, |
|
"loss": 0.7333, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.268370607028754, |
|
"grad_norm": 1.1591760710159509, |
|
"learning_rate": 2.153846153846154e-05, |
|
"loss": 0.748, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.28115015974440893, |
|
"grad_norm": 0.7918148886565669, |
|
"learning_rate": 2.2564102564102566e-05, |
|
"loss": 0.7159, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.2939297124600639, |
|
"grad_norm": 0.9632675527150978, |
|
"learning_rate": 2.3589743589743593e-05, |
|
"loss": 0.6958, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.30670926517571884, |
|
"grad_norm": 0.9408213867843294, |
|
"learning_rate": 2.461538461538462e-05, |
|
"loss": 0.7061, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.3194888178913738, |
|
"grad_norm": 0.7998105188303717, |
|
"learning_rate": 2.5641025641025646e-05, |
|
"loss": 0.7056, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.33226837060702874, |
|
"grad_norm": 0.9885203953583884, |
|
"learning_rate": 2.6666666666666667e-05, |
|
"loss": 0.7043, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.3450479233226837, |
|
"grad_norm": 0.8586191987847381, |
|
"learning_rate": 2.7692307692307694e-05, |
|
"loss": 0.689, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.35782747603833864, |
|
"grad_norm": 0.8572793221628082, |
|
"learning_rate": 2.871794871794872e-05, |
|
"loss": 0.6885, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.3706070287539936, |
|
"grad_norm": 0.703637939749516, |
|
"learning_rate": 2.9743589743589747e-05, |
|
"loss": 0.6829, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.38338658146964855, |
|
"grad_norm": 0.9933804760621775, |
|
"learning_rate": 3.0769230769230774e-05, |
|
"loss": 0.6938, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.3961661341853035, |
|
"grad_norm": 0.8925386250116185, |
|
"learning_rate": 3.1794871794871795e-05, |
|
"loss": 0.6863, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.40894568690095845, |
|
"grad_norm": 1.0350535569861223, |
|
"learning_rate": 3.282051282051282e-05, |
|
"loss": 0.6893, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.4217252396166134, |
|
"grad_norm": 1.1445914450631147, |
|
"learning_rate": 3.384615384615385e-05, |
|
"loss": 0.6708, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.43450479233226835, |
|
"grad_norm": 0.8753384928477117, |
|
"learning_rate": 3.4871794871794875e-05, |
|
"loss": 0.6939, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.4472843450479233, |
|
"grad_norm": 0.8393311315022303, |
|
"learning_rate": 3.58974358974359e-05, |
|
"loss": 0.6661, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.46006389776357826, |
|
"grad_norm": 1.1565786569988288, |
|
"learning_rate": 3.692307692307693e-05, |
|
"loss": 0.6669, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.4728434504792332, |
|
"grad_norm": 0.6967547302370791, |
|
"learning_rate": 3.794871794871795e-05, |
|
"loss": 0.6626, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.48562300319488816, |
|
"grad_norm": 0.8751186173570553, |
|
"learning_rate": 3.8974358974358976e-05, |
|
"loss": 0.6393, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.4984025559105431, |
|
"grad_norm": 1.2770961083050876, |
|
"learning_rate": 4e-05, |
|
"loss": 0.6676, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.5111821086261981, |
|
"grad_norm": 1.2772025124958417, |
|
"learning_rate": 3.9999198907597046e-05, |
|
"loss": 0.6667, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.5239616613418531, |
|
"grad_norm": 0.8066890893797722, |
|
"learning_rate": 3.9996795694563096e-05, |
|
"loss": 0.6499, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.536741214057508, |
|
"grad_norm": 1.579755868796052, |
|
"learning_rate": 3.999279055341771e-05, |
|
"loss": 0.6569, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.549520766773163, |
|
"grad_norm": 0.706322047158653, |
|
"learning_rate": 3.998718380500971e-05, |
|
"loss": 0.6541, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.5623003194888179, |
|
"grad_norm": 1.586782594427505, |
|
"learning_rate": 3.997997589849145e-05, |
|
"loss": 0.652, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.5750798722044729, |
|
"grad_norm": 1.0807329202383447, |
|
"learning_rate": 3.9971167411282835e-05, |
|
"loss": 0.6556, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.5878594249201278, |
|
"grad_norm": 1.6536678088892875, |
|
"learning_rate": 3.99607590490251e-05, |
|
"loss": 0.6444, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.6006389776357828, |
|
"grad_norm": 1.1989223290936704, |
|
"learning_rate": 3.9948751645524235e-05, |
|
"loss": 0.6581, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.6134185303514377, |
|
"grad_norm": 1.5873983130186309, |
|
"learning_rate": 3.9935146162684206e-05, |
|
"loss": 0.634, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.6261980830670927, |
|
"grad_norm": 1.2371929427150843, |
|
"learning_rate": 3.9919943690429906e-05, |
|
"loss": 0.6423, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.6389776357827476, |
|
"grad_norm": 1.373873539863577, |
|
"learning_rate": 3.9903145446619837e-05, |
|
"loss": 0.6462, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.6517571884984026, |
|
"grad_norm": 1.1016235774139902, |
|
"learning_rate": 3.9884752776948564e-05, |
|
"loss": 0.6401, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.6645367412140575, |
|
"grad_norm": 1.1209870974953269, |
|
"learning_rate": 3.9864767154838864e-05, |
|
"loss": 0.6367, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.6773162939297125, |
|
"grad_norm": 1.0860208778406302, |
|
"learning_rate": 3.9843190181323744e-05, |
|
"loss": 0.6417, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.6900958466453674, |
|
"grad_norm": 1.1698853570343006, |
|
"learning_rate": 3.982002358491817e-05, |
|
"loss": 0.6434, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.7028753993610224, |
|
"grad_norm": 0.9936615094214557, |
|
"learning_rate": 3.979526922148058e-05, |
|
"loss": 0.6428, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.7156549520766773, |
|
"grad_norm": 1.0541150791659504, |
|
"learning_rate": 3.9768929074064206e-05, |
|
"loss": 0.6509, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.7284345047923323, |
|
"grad_norm": 0.9556652153918601, |
|
"learning_rate": 3.9741005252758255e-05, |
|
"loss": 0.6287, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.7412140575079872, |
|
"grad_norm": 0.954056106286679, |
|
"learning_rate": 3.971149999451886e-05, |
|
"loss": 0.6293, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.7539936102236422, |
|
"grad_norm": 1.2607602622746636, |
|
"learning_rate": 3.9680415662989806e-05, |
|
"loss": 0.6451, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.7667731629392971, |
|
"grad_norm": 0.638676278829799, |
|
"learning_rate": 3.9647754748313294e-05, |
|
"loss": 0.6356, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.7795527156549521, |
|
"grad_norm": 1.0681234854801602, |
|
"learning_rate": 3.96135198669304e-05, |
|
"loss": 0.6368, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.792332268370607, |
|
"grad_norm": 1.033023863786734, |
|
"learning_rate": 3.957771376137144e-05, |
|
"loss": 0.6062, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.805111821086262, |
|
"grad_norm": 0.9542423560838191, |
|
"learning_rate": 3.954033930003634e-05, |
|
"loss": 0.6273, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.8178913738019169, |
|
"grad_norm": 0.9254432594288557, |
|
"learning_rate": 3.9501399476964806e-05, |
|
"loss": 0.6228, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.8306709265175719, |
|
"grad_norm": 0.7617576652045225, |
|
"learning_rate": 3.946089741159648e-05, |
|
"loss": 0.6106, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.8434504792332268, |
|
"grad_norm": 0.6249220755858457, |
|
"learning_rate": 3.9418836348521045e-05, |
|
"loss": 0.6333, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.8562300319488818, |
|
"grad_norm": 0.977881184781753, |
|
"learning_rate": 3.937521965721831e-05, |
|
"loss": 0.6158, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.8690095846645367, |
|
"grad_norm": 1.0978122758989277, |
|
"learning_rate": 3.933005083178828e-05, |
|
"loss": 0.627, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.8817891373801917, |
|
"grad_norm": 0.657146186705749, |
|
"learning_rate": 3.928333349067125e-05, |
|
"loss": 0.6355, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.8945686900958466, |
|
"grad_norm": 0.6445634396508156, |
|
"learning_rate": 3.923507137635792e-05, |
|
"loss": 0.6269, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.9073482428115016, |
|
"grad_norm": 0.8852782221046644, |
|
"learning_rate": 3.9185268355089606e-05, |
|
"loss": 0.6358, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.9201277955271565, |
|
"grad_norm": 0.8747591576573158, |
|
"learning_rate": 3.913392841654851e-05, |
|
"loss": 0.6365, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.9329073482428115, |
|
"grad_norm": 0.7450313154115504, |
|
"learning_rate": 3.9081055673538093e-05, |
|
"loss": 0.6213, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.9456869009584664, |
|
"grad_norm": 0.7884996548731228, |
|
"learning_rate": 3.902665436165364e-05, |
|
"loss": 0.6176, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.9584664536741214, |
|
"grad_norm": 0.7688130173646047, |
|
"learning_rate": 3.897072883894291e-05, |
|
"loss": 0.6165, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.9712460063897763, |
|
"grad_norm": 0.6191938351479404, |
|
"learning_rate": 3.8913283585557054e-05, |
|
"loss": 0.6137, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.9840255591054313, |
|
"grad_norm": 0.7872267173045574, |
|
"learning_rate": 3.885432320339167e-05, |
|
"loss": 0.6186, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.9968051118210862, |
|
"grad_norm": 0.895477964352014, |
|
"learning_rate": 3.879385241571817e-05, |
|
"loss": 0.6167, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 1.0095846645367412, |
|
"grad_norm": 0.8308432476526806, |
|
"learning_rate": 3.873187606680543e-05, |
|
"loss": 0.5632, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 1.0223642172523961, |
|
"grad_norm": 0.5713434732452398, |
|
"learning_rate": 3.866839912153168e-05, |
|
"loss": 0.5521, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 1.035143769968051, |
|
"grad_norm": 0.7142369781512922, |
|
"learning_rate": 3.860342666498677e-05, |
|
"loss": 0.5572, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 1.0479233226837061, |
|
"grad_norm": 0.799601323958914, |
|
"learning_rate": 3.853696390206484e-05, |
|
"loss": 0.5544, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 1.060702875399361, |
|
"grad_norm": 0.7455938478793749, |
|
"learning_rate": 3.846901615704734e-05, |
|
"loss": 0.5552, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 1.073482428115016, |
|
"grad_norm": 0.6034988797649778, |
|
"learning_rate": 3.839958887317649e-05, |
|
"loss": 0.5532, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 1.0862619808306708, |
|
"grad_norm": 0.6770389869828726, |
|
"learning_rate": 3.832868761221926e-05, |
|
"loss": 0.5451, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 1.099041533546326, |
|
"grad_norm": 0.954540528268312, |
|
"learning_rate": 3.825631805402182e-05, |
|
"loss": 0.5503, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 1.1118210862619808, |
|
"grad_norm": 0.8213749729805485, |
|
"learning_rate": 3.818248599605448e-05, |
|
"loss": 0.5601, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 1.1246006389776357, |
|
"grad_norm": 0.741351276087373, |
|
"learning_rate": 3.810719735294731e-05, |
|
"loss": 0.5654, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 1.1373801916932909, |
|
"grad_norm": 0.8481794308869871, |
|
"learning_rate": 3.8030458156016326e-05, |
|
"loss": 0.5565, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 1.1501597444089458, |
|
"grad_norm": 0.7594004526515341, |
|
"learning_rate": 3.795227455278029e-05, |
|
"loss": 0.5545, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 1.1629392971246006, |
|
"grad_norm": 0.632168169185403, |
|
"learning_rate": 3.787265280646825e-05, |
|
"loss": 0.5457, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 1.1757188498402555, |
|
"grad_norm": 0.7031486638124088, |
|
"learning_rate": 3.7791599295517825e-05, |
|
"loss": 0.5468, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 1.1884984025559104, |
|
"grad_norm": 0.6808588700454326, |
|
"learning_rate": 3.7709120513064196e-05, |
|
"loss": 0.5536, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 1.2012779552715656, |
|
"grad_norm": 0.6921396282319281, |
|
"learning_rate": 3.762522306641998e-05, |
|
"loss": 0.5506, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 1.2140575079872205, |
|
"grad_norm": 0.5121947510357499, |
|
"learning_rate": 3.7539913676545874e-05, |
|
"loss": 0.5522, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 1.2268370607028753, |
|
"grad_norm": 0.6392685476020531, |
|
"learning_rate": 3.745319917751229e-05, |
|
"loss": 0.5558, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 1.2396166134185305, |
|
"grad_norm": 0.5837442702174773, |
|
"learning_rate": 3.736508651595188e-05, |
|
"loss": 0.5578, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 1.2523961661341854, |
|
"grad_norm": 0.4530666164198027, |
|
"learning_rate": 3.727558275050301e-05, |
|
"loss": 0.5458, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 1.2651757188498403, |
|
"grad_norm": 0.602372240522267, |
|
"learning_rate": 3.718469505124434e-05, |
|
"loss": 0.5501, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 1.2779552715654952, |
|
"grad_norm": 0.5644538160733871, |
|
"learning_rate": 3.709243069912041e-05, |
|
"loss": 0.5443, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 1.29073482428115, |
|
"grad_norm": 0.6022811499264725, |
|
"learning_rate": 3.699879708535838e-05, |
|
"loss": 0.5487, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 1.3035143769968052, |
|
"grad_norm": 0.550906743250385, |
|
"learning_rate": 3.69038017108759e-05, |
|
"loss": 0.5509, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 1.31629392971246, |
|
"grad_norm": 0.6233956386597138, |
|
"learning_rate": 3.680745218568026e-05, |
|
"loss": 0.5534, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 1.329073482428115, |
|
"grad_norm": 0.5182024410893715, |
|
"learning_rate": 3.6709756228258735e-05, |
|
"loss": 0.556, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 1.34185303514377, |
|
"grad_norm": 0.564945200040499, |
|
"learning_rate": 3.6610721664960236e-05, |
|
"loss": 0.5512, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 1.354632587859425, |
|
"grad_norm": 0.612513184636645, |
|
"learning_rate": 3.65103564293684e-05, |
|
"loss": 0.5408, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 1.3674121405750799, |
|
"grad_norm": 0.5942673792674769, |
|
"learning_rate": 3.640866856166601e-05, |
|
"loss": 0.5451, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 1.3801916932907348, |
|
"grad_norm": 0.588459067623318, |
|
"learning_rate": 3.6305666207990886e-05, |
|
"loss": 0.5485, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 1.3929712460063897, |
|
"grad_norm": 0.4140189623419551, |
|
"learning_rate": 3.6201357619783336e-05, |
|
"loss": 0.5438, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 1.4057507987220448, |
|
"grad_norm": 0.5300715964850526, |
|
"learning_rate": 3.609575115312511e-05, |
|
"loss": 0.5539, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 1.4185303514376997, |
|
"grad_norm": 0.5406759521793079, |
|
"learning_rate": 3.598885526807003e-05, |
|
"loss": 0.5466, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 1.4313099041533546, |
|
"grad_norm": 0.47685129470426335, |
|
"learning_rate": 3.5880678527966224e-05, |
|
"loss": 0.5438, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 1.4440894568690097, |
|
"grad_norm": 0.5398592076049669, |
|
"learning_rate": 3.577122959877017e-05, |
|
"loss": 0.5419, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 1.4568690095846646, |
|
"grad_norm": 0.5490690544479012, |
|
"learning_rate": 3.566051724835245e-05, |
|
"loss": 0.5507, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 1.4696485623003195, |
|
"grad_norm": 0.47169320271914167, |
|
"learning_rate": 3.554855034579532e-05, |
|
"loss": 0.5346, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 1.4824281150159744, |
|
"grad_norm": 0.5414566322709992, |
|
"learning_rate": 3.5435337860682304e-05, |
|
"loss": 0.5423, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 1.4952076677316293, |
|
"grad_norm": 0.4186816440677675, |
|
"learning_rate": 3.532088886237956e-05, |
|
"loss": 0.5655, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 1.5079872204472844, |
|
"grad_norm": 0.5111754842170994, |
|
"learning_rate": 3.520521251930941e-05, |
|
"loss": 0.5567, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 1.5207667731629393, |
|
"grad_norm": 0.4889729745568838, |
|
"learning_rate": 3.5088318098215805e-05, |
|
"loss": 0.5494, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 1.5335463258785942, |
|
"grad_norm": 0.5970983261404058, |
|
"learning_rate": 3.497021496342203e-05, |
|
"loss": 0.5485, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 1.5463258785942493, |
|
"grad_norm": 0.6244174589560836, |
|
"learning_rate": 3.485091257608047e-05, |
|
"loss": 0.5521, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 1.5591054313099042, |
|
"grad_norm": 0.5150332399753115, |
|
"learning_rate": 3.473042049341474e-05, |
|
"loss": 0.5263, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 1.571884984025559, |
|
"grad_norm": 0.5062479419125213, |
|
"learning_rate": 3.4608748367954064e-05, |
|
"loss": 0.5662, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 1.5846645367412142, |
|
"grad_norm": 0.5795778408417265, |
|
"learning_rate": 3.4485905946759965e-05, |
|
"loss": 0.5388, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 1.5974440894568689, |
|
"grad_norm": 0.6101565234254713, |
|
"learning_rate": 3.4361903070645484e-05, |
|
"loss": 0.5468, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 1.610223642172524, |
|
"grad_norm": 0.5637863425267695, |
|
"learning_rate": 3.423674967338681e-05, |
|
"loss": 0.5675, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 1.623003194888179, |
|
"grad_norm": 0.5353085598782966, |
|
"learning_rate": 3.411045578092754e-05, |
|
"loss": 0.5318, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 1.6357827476038338, |
|
"grad_norm": 0.6536912429537135, |
|
"learning_rate": 3.398303151057543e-05, |
|
"loss": 0.5605, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 1.648562300319489, |
|
"grad_norm": 0.4899086385884318, |
|
"learning_rate": 3.385448707019199e-05, |
|
"loss": 0.5538, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 1.6613418530351438, |
|
"grad_norm": 0.5325946949888003, |
|
"learning_rate": 3.372483275737468e-05, |
|
"loss": 0.5588, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.6741214057507987, |
|
"grad_norm": 0.5403278546775407, |
|
"learning_rate": 3.359407895863199e-05, |
|
"loss": 0.5379, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 1.6869009584664538, |
|
"grad_norm": 0.5139765621340018, |
|
"learning_rate": 3.34622361485514e-05, |
|
"loss": 0.5425, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 1.6996805111821085, |
|
"grad_norm": 0.5582667520807697, |
|
"learning_rate": 3.332931488896029e-05, |
|
"loss": 0.5561, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 1.7124600638977636, |
|
"grad_norm": 0.5725496792559248, |
|
"learning_rate": 3.319532582807977e-05, |
|
"loss": 0.538, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 1.7252396166134185, |
|
"grad_norm": 0.5796487796988573, |
|
"learning_rate": 3.30602796996717e-05, |
|
"loss": 0.5461, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 1.7380191693290734, |
|
"grad_norm": 0.5264811040173926, |
|
"learning_rate": 3.2924187322178865e-05, |
|
"loss": 0.5297, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 1.7507987220447285, |
|
"grad_norm": 0.5094443596891554, |
|
"learning_rate": 3.278705959785821e-05, |
|
"loss": 0.5425, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 1.7635782747603834, |
|
"grad_norm": 0.560498854774084, |
|
"learning_rate": 3.2648907511907544e-05, |
|
"loss": 0.5288, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 1.7763578274760383, |
|
"grad_norm": 0.5650237995626031, |
|
"learning_rate": 3.250974213158555e-05, |
|
"loss": 0.5402, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 1.7891373801916934, |
|
"grad_norm": 0.5736887246028277, |
|
"learning_rate": 3.23695746053251e-05, |
|
"loss": 0.5462, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.8019169329073481, |
|
"grad_norm": 0.6448216840943815, |
|
"learning_rate": 3.222841616184025e-05, |
|
"loss": 0.5457, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 1.8146964856230032, |
|
"grad_norm": 0.5409547316064893, |
|
"learning_rate": 3.208627810922665e-05, |
|
"loss": 0.5467, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 1.8274760383386581, |
|
"grad_norm": 0.5281660531292864, |
|
"learning_rate": 3.194317183405573e-05, |
|
"loss": 0.5584, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 1.840255591054313, |
|
"grad_norm": 0.5642907834669127, |
|
"learning_rate": 3.1799108800462466e-05, |
|
"loss": 0.5397, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 1.8530351437699681, |
|
"grad_norm": 0.6958435076312217, |
|
"learning_rate": 3.1654100549227024e-05, |
|
"loss": 0.5619, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 1.865814696485623, |
|
"grad_norm": 0.44915446473186066, |
|
"learning_rate": 3.1508158696850275e-05, |
|
"loss": 0.5371, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 1.878594249201278, |
|
"grad_norm": 0.7415779882754461, |
|
"learning_rate": 3.136129493462312e-05, |
|
"loss": 0.5459, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 1.891373801916933, |
|
"grad_norm": 0.5590435801827939, |
|
"learning_rate": 3.121352102768998e-05, |
|
"loss": 0.5629, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.9041533546325877, |
|
"grad_norm": 0.6262150274271875, |
|
"learning_rate": 3.106484881410628e-05, |
|
"loss": 0.5475, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 1.9169329073482428, |
|
"grad_norm": 0.7182303942682138, |
|
"learning_rate": 3.091529020389009e-05, |
|
"loss": 0.5446, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.9297124600638977, |
|
"grad_norm": 0.4340720332975557, |
|
"learning_rate": 3.076485717806808e-05, |
|
"loss": 0.5435, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 1.9424920127795526, |
|
"grad_norm": 0.572990989223066, |
|
"learning_rate": 3.061356178771564e-05, |
|
"loss": 0.5419, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.9552715654952078, |
|
"grad_norm": 0.5761722054308671, |
|
"learning_rate": 3.0461416152991555e-05, |
|
"loss": 0.545, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 1.9680511182108626, |
|
"grad_norm": 0.47715951155810205, |
|
"learning_rate": 3.0308432462167045e-05, |
|
"loss": 0.5408, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 1.9808306709265175, |
|
"grad_norm": 0.44500377408250574, |
|
"learning_rate": 3.015462297064936e-05, |
|
"loss": 0.5281, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 1.9936102236421727, |
|
"grad_norm": 0.5298218047198479, |
|
"learning_rate": 3.0000000000000004e-05, |
|
"loss": 0.5289, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 2.0063897763578273, |
|
"grad_norm": 0.486757795281033, |
|
"learning_rate": 2.98445759369477e-05, |
|
"loss": 0.5006, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 2.0191693290734825, |
|
"grad_norm": 0.46748405855369674, |
|
"learning_rate": 2.9688363232396056e-05, |
|
"loss": 0.4602, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 2.0319488817891376, |
|
"grad_norm": 0.9032320953813431, |
|
"learning_rate": 2.9531374400426158e-05, |
|
"loss": 0.4938, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 2.0447284345047922, |
|
"grad_norm": 0.7141637648465875, |
|
"learning_rate": 2.9373622017294075e-05, |
|
"loss": 0.4683, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 2.0575079872204474, |
|
"grad_norm": 0.6638457299888288, |
|
"learning_rate": 2.9215118720423375e-05, |
|
"loss": 0.4655, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 2.070287539936102, |
|
"grad_norm": 0.5740802765073745, |
|
"learning_rate": 2.9055877207392752e-05, |
|
"loss": 0.4664, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 2.083067092651757, |
|
"grad_norm": 0.541980004503229, |
|
"learning_rate": 2.8895910234918828e-05, |
|
"loss": 0.4594, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 2.0958466453674123, |
|
"grad_norm": 0.7230973797959268, |
|
"learning_rate": 2.873523061783426e-05, |
|
"loss": 0.4535, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 2.108626198083067, |
|
"grad_norm": 0.5775309097959617, |
|
"learning_rate": 2.8573851228061084e-05, |
|
"loss": 0.4469, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 2.121405750798722, |
|
"grad_norm": 0.7114460224550682, |
|
"learning_rate": 2.8411784993579633e-05, |
|
"loss": 0.4453, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 2.134185303514377, |
|
"grad_norm": 0.5544199950880329, |
|
"learning_rate": 2.8249044897392814e-05, |
|
"loss": 0.4447, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 2.146964856230032, |
|
"grad_norm": 0.7751203169810186, |
|
"learning_rate": 2.80856439764861e-05, |
|
"loss": 0.4627, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 2.159744408945687, |
|
"grad_norm": 0.6633500729778636, |
|
"learning_rate": 2.792159532078314e-05, |
|
"loss": 0.4621, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 2.1725239616613417, |
|
"grad_norm": 0.5933550753958191, |
|
"learning_rate": 2.77569120720971e-05, |
|
"loss": 0.4568, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 2.1853035143769968, |
|
"grad_norm": 0.7185282353498863, |
|
"learning_rate": 2.7591607423077932e-05, |
|
"loss": 0.4669, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 2.198083067092652, |
|
"grad_norm": 0.4752269249605016, |
|
"learning_rate": 2.7425694616155474e-05, |
|
"loss": 0.4418, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 2.2108626198083066, |
|
"grad_norm": 0.5405713655934162, |
|
"learning_rate": 2.7259186942478656e-05, |
|
"loss": 0.4496, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 2.2236421725239617, |
|
"grad_norm": 0.5215249693639501, |
|
"learning_rate": 2.7092097740850712e-05, |
|
"loss": 0.4599, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 2.236421725239617, |
|
"grad_norm": 0.42509149953729447, |
|
"learning_rate": 2.692444039666066e-05, |
|
"loss": 0.4538, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 2.2492012779552715, |
|
"grad_norm": 0.443104585356185, |
|
"learning_rate": 2.6756228340810946e-05, |
|
"loss": 0.4423, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 2.2619808306709266, |
|
"grad_norm": 0.4244498485086766, |
|
"learning_rate": 2.6587475048641596e-05, |
|
"loss": 0.4581, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 2.2747603833865817, |
|
"grad_norm": 0.4427678121202283, |
|
"learning_rate": 2.6418194038850634e-05, |
|
"loss": 0.4622, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 2.2875399361022364, |
|
"grad_norm": 0.44158722004763423, |
|
"learning_rate": 2.624839887241115e-05, |
|
"loss": 0.4666, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 2.3003194888178915, |
|
"grad_norm": 0.4314748854864371, |
|
"learning_rate": 2.607810315148494e-05, |
|
"loss": 0.4681, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 2.313099041533546, |
|
"grad_norm": 0.41655120357135617, |
|
"learning_rate": 2.5907320518332827e-05, |
|
"loss": 0.4539, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 2.3258785942492013, |
|
"grad_norm": 0.4203925750652448, |
|
"learning_rate": 2.5736064654221808e-05, |
|
"loss": 0.4427, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 2.3386581469648564, |
|
"grad_norm": 0.3893882507171406, |
|
"learning_rate": 2.5564349278329056e-05, |
|
"loss": 0.4575, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 2.351437699680511, |
|
"grad_norm": 0.39085497976651834, |
|
"learning_rate": 2.539218814664288e-05, |
|
"loss": 0.4463, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 2.364217252396166, |
|
"grad_norm": 0.4703088592774784, |
|
"learning_rate": 2.521959505086075e-05, |
|
"loss": 0.4521, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 2.376996805111821, |
|
"grad_norm": 0.48500913272959334, |
|
"learning_rate": 2.5046583817284437e-05, |
|
"loss": 0.4608, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 2.389776357827476, |
|
"grad_norm": 0.43887176139587314, |
|
"learning_rate": 2.487316830571244e-05, |
|
"loss": 0.4835, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 2.402555910543131, |
|
"grad_norm": 0.464584961974617, |
|
"learning_rate": 2.4699362408329646e-05, |
|
"loss": 0.4604, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 2.415335463258786, |
|
"grad_norm": 0.4154167662123907, |
|
"learning_rate": 2.4525180048594452e-05, |
|
"loss": 0.4446, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 2.428115015974441, |
|
"grad_norm": 0.46994597432826285, |
|
"learning_rate": 2.435063518012335e-05, |
|
"loss": 0.4723, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 2.440894568690096, |
|
"grad_norm": 0.39642847919224067, |
|
"learning_rate": 2.4175741785573177e-05, |
|
"loss": 0.4441, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 2.4536741214057507, |
|
"grad_norm": 0.42331312684198885, |
|
"learning_rate": 2.4000513875520892e-05, |
|
"loss": 0.4751, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 2.466453674121406, |
|
"grad_norm": 0.42637579603720094, |
|
"learning_rate": 2.3824965487341247e-05, |
|
"loss": 0.4503, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 2.479233226837061, |
|
"grad_norm": 0.4176770140541874, |
|
"learning_rate": 2.3649110684082258e-05, |
|
"loss": 0.4614, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 2.4920127795527156, |
|
"grad_norm": 0.4383167963140633, |
|
"learning_rate": 2.3472963553338614e-05, |
|
"loss": 0.4475, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 2.5047923322683707, |
|
"grad_norm": 0.3533765206549483, |
|
"learning_rate": 2.3296538206123134e-05, |
|
"loss": 0.4717, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 2.5175718849840254, |
|
"grad_norm": 0.4802148000025239, |
|
"learning_rate": 2.311984877573636e-05, |
|
"loss": 0.4587, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 2.5303514376996805, |
|
"grad_norm": 0.4884867524733879, |
|
"learning_rate": 2.2942909416634326e-05, |
|
"loss": 0.471, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 2.543130990415335, |
|
"grad_norm": 0.38302934162963237, |
|
"learning_rate": 2.2765734303294666e-05, |
|
"loss": 0.4663, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 2.5559105431309903, |
|
"grad_norm": 0.4919891722403176, |
|
"learning_rate": 2.2588337629081107e-05, |
|
"loss": 0.4624, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 2.5686900958466454, |
|
"grad_norm": 0.42439763122441787, |
|
"learning_rate": 2.2410733605106462e-05, |
|
"loss": 0.4733, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 2.5814696485623, |
|
"grad_norm": 0.455600248616356, |
|
"learning_rate": 2.2232936459094158e-05, |
|
"loss": 0.4719, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 2.594249201277955, |
|
"grad_norm": 0.37772141842234497, |
|
"learning_rate": 2.205496043423849e-05, |
|
"loss": 0.4643, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 2.6070287539936103, |
|
"grad_norm": 0.39487928087355095, |
|
"learning_rate": 2.1876819788063586e-05, |
|
"loss": 0.4599, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 2.619808306709265, |
|
"grad_norm": 0.37752084464288793, |
|
"learning_rate": 2.16985287912813e-05, |
|
"loss": 0.4611, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 2.63258785942492, |
|
"grad_norm": 0.41690300860275786, |
|
"learning_rate": 2.1520101726647922e-05, |
|
"loss": 0.4656, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 2.6453674121405752, |
|
"grad_norm": 0.35278905850290787, |
|
"learning_rate": 2.1341552887820048e-05, |
|
"loss": 0.4509, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 2.65814696485623, |
|
"grad_norm": 0.3952082192254116, |
|
"learning_rate": 2.1162896578209517e-05, |
|
"loss": 0.4552, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 2.670926517571885, |
|
"grad_norm": 0.37698129545807996, |
|
"learning_rate": 2.0984147109837564e-05, |
|
"loss": 0.4587, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 2.68370607028754, |
|
"grad_norm": 0.3385957900251669, |
|
"learning_rate": 2.0805318802188307e-05, |
|
"loss": 0.4708, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 2.696485623003195, |
|
"grad_norm": 0.3715034095313426, |
|
"learning_rate": 2.0626425981061608e-05, |
|
"loss": 0.4494, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 2.70926517571885, |
|
"grad_norm": 0.3251290710767365, |
|
"learning_rate": 2.0447482977425465e-05, |
|
"loss": 0.4563, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 2.722044728434505, |
|
"grad_norm": 0.37877042575127295, |
|
"learning_rate": 2.0268504126267952e-05, |
|
"loss": 0.4495, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 2.7348242811501597, |
|
"grad_norm": 0.3513613274662141, |
|
"learning_rate": 2.008950376544887e-05, |
|
"loss": 0.4681, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 2.747603833865815, |
|
"grad_norm": 0.3673333816439615, |
|
"learning_rate": 1.9910496234551132e-05, |
|
"loss": 0.4579, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 2.7603833865814695, |
|
"grad_norm": 0.4002781230499196, |
|
"learning_rate": 1.9731495873732055e-05, |
|
"loss": 0.4631, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 2.7731629392971247, |
|
"grad_norm": 0.4144225996048057, |
|
"learning_rate": 1.9552517022574542e-05, |
|
"loss": 0.4588, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 2.7859424920127793, |
|
"grad_norm": 0.38483992779543585, |
|
"learning_rate": 1.93735740189384e-05, |
|
"loss": 0.4554, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 2.7987220447284344, |
|
"grad_norm": 0.39775925575971094, |
|
"learning_rate": 1.9194681197811703e-05, |
|
"loss": 0.4379, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 2.8115015974440896, |
|
"grad_norm": 0.38104448687547743, |
|
"learning_rate": 1.901585289016244e-05, |
|
"loss": 0.4536, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 2.8242811501597442, |
|
"grad_norm": 0.37218167438181177, |
|
"learning_rate": 1.8837103421790486e-05, |
|
"loss": 0.4546, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 2.8370607028753994, |
|
"grad_norm": 0.3238264848569907, |
|
"learning_rate": 1.8658447112179952e-05, |
|
"loss": 0.4971, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 2.8498402555910545, |
|
"grad_norm": 0.3669356917599918, |
|
"learning_rate": 1.8479898273352084e-05, |
|
"loss": 0.4636, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 2.862619808306709, |
|
"grad_norm": 0.34897969771117815, |
|
"learning_rate": 1.83014712087187e-05, |
|
"loss": 0.4454, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 2.8753993610223643, |
|
"grad_norm": 0.3531428513148952, |
|
"learning_rate": 1.8123180211936417e-05, |
|
"loss": 0.4321, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 2.8881789137380194, |
|
"grad_norm": 0.3155910003073882, |
|
"learning_rate": 1.794503956576152e-05, |
|
"loss": 0.4737, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 2.900958466453674, |
|
"grad_norm": 0.3514454764533909, |
|
"learning_rate": 1.776706354090585e-05, |
|
"loss": 0.4699, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 2.913738019169329, |
|
"grad_norm": 0.32261320466049764, |
|
"learning_rate": 1.758926639489354e-05, |
|
"loss": 0.449, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 2.9265175718849843, |
|
"grad_norm": 0.31699325291611036, |
|
"learning_rate": 1.7411662370918893e-05, |
|
"loss": 0.441, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 2.939297124600639, |
|
"grad_norm": 0.3465501067615838, |
|
"learning_rate": 1.7234265696705344e-05, |
|
"loss": 0.4747, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 2.952076677316294, |
|
"grad_norm": 0.3390277128709767, |
|
"learning_rate": 1.7057090583365678e-05, |
|
"loss": 0.4598, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 2.9648562300319488, |
|
"grad_norm": 0.36925982516132494, |
|
"learning_rate": 1.6880151224263646e-05, |
|
"loss": 0.4641, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 2.977635782747604, |
|
"grad_norm": 0.32500966228489814, |
|
"learning_rate": 1.6703461793876876e-05, |
|
"loss": 0.4586, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 2.9904153354632586, |
|
"grad_norm": 0.34908654244155446, |
|
"learning_rate": 1.6527036446661396e-05, |
|
"loss": 0.4662, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 3.012779552715655, |
|
"grad_norm": 0.5128836169014913, |
|
"learning_rate": 1.635088931591775e-05, |
|
"loss": 0.3811, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 3.02555910543131, |
|
"grad_norm": 0.3546127617977104, |
|
"learning_rate": 1.6175034512658753e-05, |
|
"loss": 0.3924, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 3.038338658146965, |
|
"grad_norm": 0.6442307260367379, |
|
"learning_rate": 1.5999486124479115e-05, |
|
"loss": 0.393, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 3.0511182108626196, |
|
"grad_norm": 0.5003594645494083, |
|
"learning_rate": 1.5824258214426833e-05, |
|
"loss": 0.3805, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 3.0638977635782747, |
|
"grad_norm": 0.4609554964301076, |
|
"learning_rate": 1.5649364819876655e-05, |
|
"loss": 0.4057, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 3.07667731629393, |
|
"grad_norm": 0.4976572991284983, |
|
"learning_rate": 1.547481995140556e-05, |
|
"loss": 0.3879, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 3.0894568690095845, |
|
"grad_norm": 0.38362452674206254, |
|
"learning_rate": 1.5300637591670357e-05, |
|
"loss": 0.3733, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 3.1022364217252396, |
|
"grad_norm": 0.45482995584383695, |
|
"learning_rate": 1.5126831694287564e-05, |
|
"loss": 0.3696, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 3.1150159744408947, |
|
"grad_norm": 0.44666813959914614, |
|
"learning_rate": 1.4953416182715566e-05, |
|
"loss": 0.3736, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 3.1277955271565494, |
|
"grad_norm": 0.4292571016117774, |
|
"learning_rate": 1.478040494913926e-05, |
|
"loss": 0.3827, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 3.1405750798722045, |
|
"grad_norm": 0.46347737412533785, |
|
"learning_rate": 1.460781185335713e-05, |
|
"loss": 0.3923, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 3.1533546325878596, |
|
"grad_norm": 0.4775852522273919, |
|
"learning_rate": 1.443565072167095e-05, |
|
"loss": 0.4088, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 3.1661341853035143, |
|
"grad_norm": 0.41083533606012235, |
|
"learning_rate": 1.4263935345778202e-05, |
|
"loss": 0.3801, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 3.1789137380191694, |
|
"grad_norm": 0.4484362081329354, |
|
"learning_rate": 1.409267948166718e-05, |
|
"loss": 0.376, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 3.191693290734824, |
|
"grad_norm": 0.3906442765094229, |
|
"learning_rate": 1.3921896848515064e-05, |
|
"loss": 0.3815, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 3.2044728434504792, |
|
"grad_norm": 0.3700225581587479, |
|
"learning_rate": 1.3751601127588849e-05, |
|
"loss": 0.3826, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 3.2172523961661343, |
|
"grad_norm": 0.38606672559398486, |
|
"learning_rate": 1.3581805961149371e-05, |
|
"loss": 0.3779, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 3.230031948881789, |
|
"grad_norm": 0.3643687933394165, |
|
"learning_rate": 1.341252495135841e-05, |
|
"loss": 0.3734, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 3.242811501597444, |
|
"grad_norm": 0.3859244143097149, |
|
"learning_rate": 1.324377165918906e-05, |
|
"loss": 0.3828, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 3.255591054313099, |
|
"grad_norm": 0.387692164875178, |
|
"learning_rate": 1.3075559603339354e-05, |
|
"loss": 0.3811, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 3.268370607028754, |
|
"grad_norm": 0.35085934782850087, |
|
"learning_rate": 1.2907902259149287e-05, |
|
"loss": 0.3861, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 3.281150159744409, |
|
"grad_norm": 0.4166197346329323, |
|
"learning_rate": 1.274081305752135e-05, |
|
"loss": 0.3934, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 3.2939297124600637, |
|
"grad_norm": 0.3418519825744561, |
|
"learning_rate": 1.2574305383844528e-05, |
|
"loss": 0.3877, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 3.306709265175719, |
|
"grad_norm": 0.36354782411908043, |
|
"learning_rate": 1.2408392576922075e-05, |
|
"loss": 0.3849, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 3.319488817891374, |
|
"grad_norm": 0.37242419363296814, |
|
"learning_rate": 1.2243087927902905e-05, |
|
"loss": 0.3937, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 3.3322683706070286, |
|
"grad_norm": 0.40415705465213253, |
|
"learning_rate": 1.2078404679216864e-05, |
|
"loss": 0.3826, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 3.3450479233226837, |
|
"grad_norm": 0.3178335493857135, |
|
"learning_rate": 1.1914356023513904e-05, |
|
"loss": 0.3834, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 3.357827476038339, |
|
"grad_norm": 0.3648587053326533, |
|
"learning_rate": 1.1750955102607193e-05, |
|
"loss": 0.3699, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 3.3706070287539935, |
|
"grad_norm": 0.32908420449476, |
|
"learning_rate": 1.1588215006420374e-05, |
|
"loss": 0.406, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 3.3833865814696487, |
|
"grad_norm": 0.3230655828560371, |
|
"learning_rate": 1.1426148771938915e-05, |
|
"loss": 0.4007, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 3.3961661341853033, |
|
"grad_norm": 0.364247334085622, |
|
"learning_rate": 1.1264769382165748e-05, |
|
"loss": 0.3912, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 3.4089456869009584, |
|
"grad_norm": 0.34854917848030975, |
|
"learning_rate": 1.110408976508118e-05, |
|
"loss": 0.3981, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 3.4217252396166136, |
|
"grad_norm": 0.32853287611588194, |
|
"learning_rate": 1.094412279260726e-05, |
|
"loss": 0.3846, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 3.4345047923322682, |
|
"grad_norm": 0.3407376822824476, |
|
"learning_rate": 1.0784881279576635e-05, |
|
"loss": 0.3877, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 3.4472843450479234, |
|
"grad_norm": 0.3161855560360877, |
|
"learning_rate": 1.0626377982705929e-05, |
|
"loss": 0.3635, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 3.460063897763578, |
|
"grad_norm": 0.29388908626785254, |
|
"learning_rate": 1.0468625599573842e-05, |
|
"loss": 0.3946, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 3.472843450479233, |
|
"grad_norm": 0.31849610999090494, |
|
"learning_rate": 1.0311636767603952e-05, |
|
"loss": 0.3731, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 3.4856230031948883, |
|
"grad_norm": 0.3208723917598162, |
|
"learning_rate": 1.0155424063052306e-05, |
|
"loss": 0.3673, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 3.498402555910543, |
|
"grad_norm": 0.29245116811873584, |
|
"learning_rate": 1.0000000000000006e-05, |
|
"loss": 0.3796, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 3.511182108626198, |
|
"grad_norm": 0.33746616232506677, |
|
"learning_rate": 9.84537702935065e-06, |
|
"loss": 0.386, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 3.523961661341853, |
|
"grad_norm": 0.29309254314881333, |
|
"learning_rate": 9.691567537832964e-06, |
|
"loss": 0.4085, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 3.536741214057508, |
|
"grad_norm": 0.28232085496123377, |
|
"learning_rate": 9.538583847008452e-06, |
|
"loss": 0.3766, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 3.549520766773163, |
|
"grad_norm": 0.30542807138023287, |
|
"learning_rate": 9.386438212284372e-06, |
|
"loss": 0.3713, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 3.562300319488818, |
|
"grad_norm": 0.3001693318285487, |
|
"learning_rate": 9.235142821931928e-06, |
|
"loss": 0.3621, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 3.5750798722044728, |
|
"grad_norm": 0.2706474019112891, |
|
"learning_rate": 9.084709796109907e-06, |
|
"loss": 0.4121, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 3.587859424920128, |
|
"grad_norm": 0.2895527171945164, |
|
"learning_rate": 8.93515118589373e-06, |
|
"loss": 0.3948, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 3.600638977635783, |
|
"grad_norm": 0.3117817510807347, |
|
"learning_rate": 8.786478972310023e-06, |
|
"loss": 0.401, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 3.6134185303514377, |
|
"grad_norm": 0.26216491972350675, |
|
"learning_rate": 8.638705065376887e-06, |
|
"loss": 0.3952, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 3.626198083067093, |
|
"grad_norm": 0.284049361201387, |
|
"learning_rate": 8.491841303149728e-06, |
|
"loss": 0.3943, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 3.6389776357827475, |
|
"grad_norm": 0.2942161552846955, |
|
"learning_rate": 8.345899450772975e-06, |
|
"loss": 0.4198, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 3.6517571884984026, |
|
"grad_norm": 0.2815277755618436, |
|
"learning_rate": 8.200891199537549e-06, |
|
"loss": 0.3775, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 3.6645367412140573, |
|
"grad_norm": 0.27855689397961697, |
|
"learning_rate": 8.056828165944282e-06, |
|
"loss": 0.3701, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 3.6773162939297124, |
|
"grad_norm": 0.29191871129995184, |
|
"learning_rate": 7.913721890773354e-06, |
|
"loss": 0.3736, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 3.6900958466453675, |
|
"grad_norm": 0.26869731691732596, |
|
"learning_rate": 7.771583838159756e-06, |
|
"loss": 0.3831, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 3.702875399361022, |
|
"grad_norm": 0.2671559120158343, |
|
"learning_rate": 7.630425394674903e-06, |
|
"loss": 0.366, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 3.7156549520766773, |
|
"grad_norm": 0.29113928428290825, |
|
"learning_rate": 7.49025786841445e-06, |
|
"loss": 0.378, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 3.7284345047923324, |
|
"grad_norm": 0.2994031778893088, |
|
"learning_rate": 7.3510924880924575e-06, |
|
"loss": 0.3774, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 3.741214057507987, |
|
"grad_norm": 0.2738974817232764, |
|
"learning_rate": 7.212940402141808e-06, |
|
"loss": 0.3846, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 3.753993610223642, |
|
"grad_norm": 0.2826921730840266, |
|
"learning_rate": 7.075812677821145e-06, |
|
"loss": 0.3727, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 3.7667731629392973, |
|
"grad_norm": 0.26668241839234336, |
|
"learning_rate": 6.939720300328303e-06, |
|
"loss": 0.391, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 3.779552715654952, |
|
"grad_norm": 0.2529079411168859, |
|
"learning_rate": 6.8046741719202385e-06, |
|
"loss": 0.3945, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 3.792332268370607, |
|
"grad_norm": 0.26348898417120825, |
|
"learning_rate": 6.67068511103971e-06, |
|
"loss": 0.3764, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 3.8051118210862622, |
|
"grad_norm": 0.2766036475667321, |
|
"learning_rate": 6.537763851448593e-06, |
|
"loss": 0.3857, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 3.817891373801917, |
|
"grad_norm": 0.2682733264498869, |
|
"learning_rate": 6.4059210413680175e-06, |
|
"loss": 0.3873, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 3.830670926517572, |
|
"grad_norm": 0.2742368830967292, |
|
"learning_rate": 6.275167242625331e-06, |
|
"loss": 0.3766, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 3.8434504792332267, |
|
"grad_norm": 0.25114479786735716, |
|
"learning_rate": 6.145512929808013e-06, |
|
"loss": 0.3975, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 3.856230031948882, |
|
"grad_norm": 0.2630221067698174, |
|
"learning_rate": 6.016968489424572e-06, |
|
"loss": 0.3816, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 3.8690095846645365, |
|
"grad_norm": 0.2693660116982185, |
|
"learning_rate": 5.889544219072465e-06, |
|
"loss": 0.3916, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 3.8817891373801916, |
|
"grad_norm": 0.26878930310587784, |
|
"learning_rate": 5.7632503266131925e-06, |
|
"loss": 0.3929, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 3.8945686900958467, |
|
"grad_norm": 0.2576790616305518, |
|
"learning_rate": 5.638096929354522e-06, |
|
"loss": 0.3934, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 3.9073482428115014, |
|
"grad_norm": 0.2551594985684908, |
|
"learning_rate": 5.514094053240035e-06, |
|
"loss": 0.3971, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 3.9201277955271565, |
|
"grad_norm": 0.26282254693750906, |
|
"learning_rate": 5.39125163204594e-06, |
|
"loss": 0.3852, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 3.9329073482428116, |
|
"grad_norm": 0.2729750102242486, |
|
"learning_rate": 5.269579506585259e-06, |
|
"loss": 0.3792, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 3.9456869009584663, |
|
"grad_norm": 0.2746715339249571, |
|
"learning_rate": 5.149087423919541e-06, |
|
"loss": 0.3809, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 3.9584664536741214, |
|
"grad_norm": 0.2589108681555479, |
|
"learning_rate": 5.029785036577976e-06, |
|
"loss": 0.38, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 3.9712460063897765, |
|
"grad_norm": 0.2583977076620543, |
|
"learning_rate": 4.911681901784198e-06, |
|
"loss": 0.3823, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 3.984025559105431, |
|
"grad_norm": 0.26533553439913465, |
|
"learning_rate": 4.794787480690597e-06, |
|
"loss": 0.4095, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 3.9968051118210863, |
|
"grad_norm": 0.29179544366540294, |
|
"learning_rate": 4.679111137620442e-06, |
|
"loss": 0.3764, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 4.0095846645367414, |
|
"grad_norm": 0.37070974270993895, |
|
"learning_rate": 4.5646621393177e-06, |
|
"loss": 0.3437, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 4.022364217252396, |
|
"grad_norm": 0.3284947553106813, |
|
"learning_rate": 4.451449654204685e-06, |
|
"loss": 0.3662, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 4.035143769968051, |
|
"grad_norm": 0.2843402603459979, |
|
"learning_rate": 4.339482751647557e-06, |
|
"loss": 0.3355, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 4.047923322683706, |
|
"grad_norm": 0.33919450346249647, |
|
"learning_rate": 4.228770401229824e-06, |
|
"loss": 0.3394, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 4.060702875399361, |
|
"grad_norm": 0.4472299669588526, |
|
"learning_rate": 4.119321472033779e-06, |
|
"loss": 0.3384, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 4.073482428115016, |
|
"grad_norm": 0.3771224880409924, |
|
"learning_rate": 4.011144731929981e-06, |
|
"loss": 0.3434, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 4.086261980830671, |
|
"grad_norm": 0.2916380797164523, |
|
"learning_rate": 3.904248846874894e-06, |
|
"loss": 0.32, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 4.099041533546326, |
|
"grad_norm": 0.33504974166135404, |
|
"learning_rate": 3.7986423802166705e-06, |
|
"loss": 0.3235, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 4.111821086261981, |
|
"grad_norm": 0.33212341390770345, |
|
"learning_rate": 3.694333792009115e-06, |
|
"loss": 0.3452, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 4.124600638977636, |
|
"grad_norm": 0.31976825359898686, |
|
"learning_rate": 3.5913314383339937e-06, |
|
"loss": 0.3473, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 4.137380191693291, |
|
"grad_norm": 0.28590474501240015, |
|
"learning_rate": 3.4896435706316e-06, |
|
"loss": 0.319, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 4.1501597444089455, |
|
"grad_norm": 0.286464326555546, |
|
"learning_rate": 3.3892783350397675e-06, |
|
"loss": 0.3473, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 4.1629392971246, |
|
"grad_norm": 0.3082641016025165, |
|
"learning_rate": 3.290243771741275e-06, |
|
"loss": 0.3522, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 4.175718849840256, |
|
"grad_norm": 0.30101579695268377, |
|
"learning_rate": 3.1925478143197418e-06, |
|
"loss": 0.3364, |
|
"step": 326 |
|
}, |
|
    {
      "epoch": 4.18849840255591,
      "grad_norm": 0.30620894785433056,
      "learning_rate": 3.0961982891241083e-06,
      "loss": 0.3383,
      "step": 327
    },
    {
      "epoch": 4.201277955271565,
      "grad_norm": 0.26164000104597274,
      "learning_rate": 3.001202914641628e-06,
      "loss": 0.3387,
      "step": 328
    },
    {
      "epoch": 4.214057507987221,
      "grad_norm": 0.27396421835749335,
      "learning_rate": 2.907569300879596e-06,
      "loss": 0.3359,
      "step": 329
    },
    {
      "epoch": 4.226837060702875,
      "grad_norm": 0.273485919076872,
      "learning_rate": 2.815304948755664e-06,
      "loss": 0.3116,
      "step": 330
    },
    {
      "epoch": 4.23961661341853,
      "grad_norm": 0.2569560566217525,
      "learning_rate": 2.7244172494969978e-06,
      "loss": 0.3369,
      "step": 331
    },
    {
      "epoch": 4.252396166134186,
      "grad_norm": 0.24962527429270018,
      "learning_rate": 2.6349134840481294e-06,
      "loss": 0.3716,
      "step": 332
    },
    {
      "epoch": 4.26517571884984,
      "grad_norm": 0.24800106442570202,
      "learning_rate": 2.546800822487714e-06,
      "loss": 0.3573,
      "step": 333
    },
    {
      "epoch": 4.277955271565495,
      "grad_norm": 0.25821807561951066,
      "learning_rate": 2.4600863234541338e-06,
      "loss": 0.3409,
      "step": 334
    },
    {
      "epoch": 4.2907348242811505,
      "grad_norm": 0.25158547315449414,
      "learning_rate": 2.374776933580025e-06,
      "loss": 0.3146,
      "step": 335
    },
    {
      "epoch": 4.303514376996805,
      "grad_norm": 0.24647625625675712,
      "learning_rate": 2.2908794869358044e-06,
      "loss": 0.3442,
      "step": 336
    },
    {
      "epoch": 4.31629392971246,
      "grad_norm": 0.2534440552420663,
      "learning_rate": 2.2084007044821764e-06,
      "loss": 0.3352,
      "step": 337
    },
    {
      "epoch": 4.329073482428115,
      "grad_norm": 0.23930760555111544,
      "learning_rate": 2.127347193531757e-06,
      "loss": 0.3388,
      "step": 338
    },
    {
      "epoch": 4.34185303514377,
      "grad_norm": 0.23902114243628853,
      "learning_rate": 2.0477254472197237e-06,
      "loss": 0.3379,
      "step": 339
    },
    {
      "epoch": 4.354632587859425,
      "grad_norm": 0.24839488860265907,
      "learning_rate": 1.96954184398368e-06,
      "loss": 0.3239,
      "step": 340
    },
    {
      "epoch": 4.36741214057508,
      "grad_norm": 0.23307275325799628,
      "learning_rate": 1.8928026470526917e-06,
      "loss": 0.3671,
      "step": 341
    },
    {
      "epoch": 4.380191693290735,
      "grad_norm": 0.2475348745152134,
      "learning_rate": 1.817514003945524e-06,
      "loss": 0.3573,
      "step": 342
    },
    {
      "epoch": 4.39297124600639,
      "grad_norm": 0.2573337703222915,
      "learning_rate": 1.743681945978184e-06,
      "loss": 0.324,
      "step": 343
    },
    {
      "epoch": 4.405750798722044,
      "grad_norm": 0.2509537961622258,
      "learning_rate": 1.6713123877807413e-06,
      "loss": 0.3235,
      "step": 344
    },
    {
      "epoch": 4.4185303514377,
      "grad_norm": 0.23972950347988456,
      "learning_rate": 1.6004111268235156e-06,
      "loss": 0.324,
      "step": 345
    },
    {
      "epoch": 4.431309904153355,
      "grad_norm": 0.24276933283329488,
      "learning_rate": 1.5309838429526714e-06,
      "loss": 0.3324,
      "step": 346
    },
    {
      "epoch": 4.444089456869009,
      "grad_norm": 0.23081639636114068,
      "learning_rate": 1.4630360979351644e-06,
      "loss": 0.3407,
      "step": 347
    },
    {
      "epoch": 4.456869009584665,
      "grad_norm": 0.23886385168549315,
      "learning_rate": 1.396573335013236e-06,
      "loss": 0.3324,
      "step": 348
    },
    {
      "epoch": 4.4696485623003195,
      "grad_norm": 0.23076136632315059,
      "learning_rate": 1.3316008784683265e-06,
      "loss": 0.3538,
      "step": 349
    },
    {
      "epoch": 4.482428115015974,
      "grad_norm": 0.24404693679132852,
      "learning_rate": 1.2681239331945695e-06,
      "loss": 0.3309,
      "step": 350
    },
    {
      "epoch": 4.49520766773163,
      "grad_norm": 0.2268383924961628,
      "learning_rate": 1.2061475842818337e-06,
      "loss": 0.3299,
      "step": 351
    },
    {
      "epoch": 4.507987220447284,
      "grad_norm": 0.22465135503054393,
      "learning_rate": 1.1456767966083393e-06,
      "loss": 0.324,
      "step": 352
    },
    {
      "epoch": 4.520766773162939,
      "grad_norm": 0.2220366554395612,
      "learning_rate": 1.086716414442952e-06,
      "loss": 0.3676,
      "step": 353
    },
    {
      "epoch": 4.533546325878595,
      "grad_norm": 0.21940448549398342,
      "learning_rate": 1.0292711610570904e-06,
      "loss": 0.334,
      "step": 354
    },
    {
      "epoch": 4.546325878594249,
      "grad_norm": 0.2144078580002602,
      "learning_rate": 9.733456383463658e-07,
      "loss": 0.3481,
      "step": 355
    },
    {
      "epoch": 4.559105431309904,
      "grad_norm": 0.2275935061530386,
      "learning_rate": 9.189443264619102e-07,
      "loss": 0.3256,
      "step": 356
    },
    {
      "epoch": 4.571884984025559,
      "grad_norm": 0.226985551191106,
      "learning_rate": 8.660715834514977e-07,
      "loss": 0.3173,
      "step": 357
    },
    {
      "epoch": 4.584664536741214,
      "grad_norm": 0.22485580253792856,
      "learning_rate": 8.147316449103959e-07,
      "loss": 0.3446,
      "step": 358
    },
    {
      "epoch": 4.597444089456869,
      "grad_norm": 0.2340075357389187,
      "learning_rate": 7.649286236420806e-07,
      "loss": 0.3364,
      "step": 359
    },
    {
      "epoch": 4.6102236421725244,
      "grad_norm": 0.22345846407489506,
      "learning_rate": 7.166665093287539e-07,
      "loss": 0.3367,
      "step": 360
    },
    {
      "epoch": 4.623003194888179,
      "grad_norm": 0.22370903675134662,
      "learning_rate": 6.69949168211721e-07,
      "loss": 0.3602,
      "step": 361
    },
    {
      "epoch": 4.635782747603834,
      "grad_norm": 0.22486085593015095,
      "learning_rate": 6.247803427816945e-07,
      "loss": 0.357,
      "step": 362
    },
    {
      "epoch": 4.6485623003194885,
      "grad_norm": 0.22083816946876406,
      "learning_rate": 5.811636514789598e-07,
      "loss": 0.3407,
      "step": 363
    },
    {
      "epoch": 4.661341853035144,
      "grad_norm": 0.2289657141053928,
      "learning_rate": 5.391025884035239e-07,
      "loss": 0.3283,
      "step": 364
    },
    {
      "epoch": 4.674121405750799,
      "grad_norm": 0.2245238228806618,
      "learning_rate": 4.986005230351954e-07,
      "loss": 0.3358,
      "step": 365
    },
    {
      "epoch": 4.686900958466453,
      "grad_norm": 0.22184337753951489,
      "learning_rate": 4.5966069996365993e-07,
      "loss": 0.3331,
      "step": 366
    },
    {
      "epoch": 4.699680511182109,
      "grad_norm": 0.2304636950863329,
      "learning_rate": 4.22286238628562e-07,
      "loss": 0.3297,
      "step": 367
    },
    {
      "epoch": 4.712460063897764,
      "grad_norm": 0.2168016391163561,
      "learning_rate": 3.8648013306960664e-07,
      "loss": 0.3407,
      "step": 368
    },
    {
      "epoch": 4.725239616613418,
      "grad_norm": 0.21992239865456173,
      "learning_rate": 3.522452516867048e-07,
      "loss": 0.3278,
      "step": 369
    },
    {
      "epoch": 4.738019169329074,
      "grad_norm": 0.22103634468553346,
      "learning_rate": 3.1958433701019697e-07,
      "loss": 0.3136,
      "step": 370
    },
    {
      "epoch": 4.7507987220447285,
      "grad_norm": 0.221656009377628,
      "learning_rate": 2.8850000548115155e-07,
      "loss": 0.342,
      "step": 371
    },
    {
      "epoch": 4.763578274760383,
      "grad_norm": 0.2195544732802204,
      "learning_rate": 2.5899474724174313e-07,
      "loss": 0.3507,
      "step": 372
    },
    {
      "epoch": 4.776357827476039,
      "grad_norm": 0.23056922821652975,
      "learning_rate": 2.3107092593579905e-07,
      "loss": 0.3375,
      "step": 373
    },
    {
      "epoch": 4.789137380191693,
      "grad_norm": 0.222492788995926,
      "learning_rate": 2.0473077851942858e-07,
      "loss": 0.3524,
      "step": 374
    },
    {
      "epoch": 4.801916932907348,
      "grad_norm": 0.22232266738755696,
      "learning_rate": 1.799764150818306e-07,
      "loss": 0.3511,
      "step": 375
    },
    {
      "epoch": 4.814696485623003,
      "grad_norm": 0.21962084479736094,
      "learning_rate": 1.5680981867625566e-07,
      "loss": 0.3334,
      "step": 376
    },
    {
      "epoch": 4.827476038338658,
      "grad_norm": 0.21579569570483248,
      "learning_rate": 1.3523284516113955e-07,
      "loss": 0.3578,
      "step": 377
    },
    {
      "epoch": 4.840255591054313,
      "grad_norm": 0.21848361329456045,
      "learning_rate": 1.1524722305144231e-07,
      "loss": 0.3482,
      "step": 378
    },
    {
      "epoch": 4.853035143769968,
      "grad_norm": 0.22295101336863904,
      "learning_rate": 9.685455338016347e-08,
      "loss": 0.344,
      "step": 379
    },
    {
      "epoch": 4.865814696485623,
      "grad_norm": 0.22388599501739676,
      "learning_rate": 8.005630957010014e-08,
      "loss": 0.3507,
      "step": 380
    },
    {
      "epoch": 4.878594249201278,
      "grad_norm": 0.21620839076719045,
      "learning_rate": 6.485383731580142e-08,
      "loss": 0.3338,
      "step": 381
    },
    {
      "epoch": 4.891373801916933,
      "grad_norm": 0.21452810970623,
      "learning_rate": 5.1248354475768034e-08,
      "loss": 0.3269,
      "step": 382
    },
    {
      "epoch": 4.904153354632588,
      "grad_norm": 0.22073444367795245,
      "learning_rate": 3.924095097489922e-08,
      "loss": 0.3343,
      "step": 383
    },
    {
      "epoch": 4.916932907348243,
      "grad_norm": 0.2168939465061565,
      "learning_rate": 2.8832588717164766e-08,
      "loss": 0.3393,
      "step": 384
    },
    {
      "epoch": 4.9297124600638975,
      "grad_norm": 0.22692258552939853,
      "learning_rate": 2.0024101508555604e-08,
      "loss": 0.3312,
      "step": 385
    },
    {
      "epoch": 4.942492012779553,
      "grad_norm": 0.21678847192685047,
      "learning_rate": 1.281619499029274e-08,
      "loss": 0.3482,
      "step": 386
    },
    {
      "epoch": 4.955271565495208,
      "grad_norm": 0.22738297907016455,
      "learning_rate": 7.209446582292501e-09,
      "loss": 0.3349,
      "step": 387
    },
    {
      "epoch": 4.968051118210862,
      "grad_norm": 0.222159525238591,
      "learning_rate": 3.2043054369057523e-09,
      "loss": 0.3281,
      "step": 388
    },
    {
      "epoch": 4.980830670926517,
      "grad_norm": 0.22624430672708137,
      "learning_rate": 8.010924029533406e-10,
      "loss": 0.3238,
      "step": 389
    },
    {
      "epoch": 4.993610223642173,
      "grad_norm": 0.23797061131952146,
      "learning_rate": 0.0,
      "loss": 0.3224,
      "step": 390
    },
    {
      "epoch": 4.993610223642173,
      "step": 390,
      "total_flos": 1.2901911719428751e+18,
      "train_loss": 0.14476858102358303,
      "train_runtime": 3885.8859,
      "train_samples_per_second": 12.867,
      "train_steps_per_second": 0.1
    }
  ],
  "logging_steps": 1,
  "max_steps": 390,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.2901911719428751e+18,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}