|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.992,
  "eval_steps": 500,
  "global_step": 390,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
|
{ |
|
"epoch": 0.0128, |
|
"grad_norm": 8.025663346068319, |
|
"learning_rate": 1.0256410256410257e-06, |
|
"loss": 1.1663, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0256, |
|
"grad_norm": 8.010365036638017, |
|
"learning_rate": 2.0512820512820513e-06, |
|
"loss": 1.1727, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0384, |
|
"grad_norm": 7.843541941375245, |
|
"learning_rate": 3.0769230769230774e-06, |
|
"loss": 1.167, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0512, |
|
"grad_norm": 7.421590803800207, |
|
"learning_rate": 4.102564102564103e-06, |
|
"loss": 1.1321, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.064, |
|
"grad_norm": 5.805442949718198, |
|
"learning_rate": 5.128205128205128e-06, |
|
"loss": 1.0654, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0768, |
|
"grad_norm": 3.2603146548909483, |
|
"learning_rate": 6.153846153846155e-06, |
|
"loss": 1.0231, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.0896, |
|
"grad_norm": 2.4566327965356973, |
|
"learning_rate": 7.17948717948718e-06, |
|
"loss": 0.9756, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.1024, |
|
"grad_norm": 5.131605177002356, |
|
"learning_rate": 8.205128205128205e-06, |
|
"loss": 1.0153, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.1152, |
|
"grad_norm": 4.906714594857795, |
|
"learning_rate": 9.230769230769232e-06, |
|
"loss": 0.9494, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.128, |
|
"grad_norm": 5.191276044454782, |
|
"learning_rate": 1.0256410256410256e-05, |
|
"loss": 0.9364, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.1408, |
|
"grad_norm": 4.859610945094299, |
|
"learning_rate": 1.1282051282051283e-05, |
|
"loss": 0.9595, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.1536, |
|
"grad_norm": 3.611776180080131, |
|
"learning_rate": 1.230769230769231e-05, |
|
"loss": 0.8851, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.1664, |
|
"grad_norm": 3.2071393973457853, |
|
"learning_rate": 1.3333333333333333e-05, |
|
"loss": 0.8609, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.1792, |
|
"grad_norm": 3.3164237243549466, |
|
"learning_rate": 1.435897435897436e-05, |
|
"loss": 0.8659, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.192, |
|
"grad_norm": 2.8241206428341763, |
|
"learning_rate": 1.5384615384615387e-05, |
|
"loss": 0.8637, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.2048, |
|
"grad_norm": 2.5429042446588745, |
|
"learning_rate": 1.641025641025641e-05, |
|
"loss": 0.8177, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.2176, |
|
"grad_norm": 2.233797254080317, |
|
"learning_rate": 1.7435897435897438e-05, |
|
"loss": 0.8268, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.2304, |
|
"grad_norm": 1.6997937530369438, |
|
"learning_rate": 1.8461538461538465e-05, |
|
"loss": 0.8072, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.2432, |
|
"grad_norm": 1.699841364037304, |
|
"learning_rate": 1.9487179487179488e-05, |
|
"loss": 0.7964, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.256, |
|
"grad_norm": 1.5492702146359056, |
|
"learning_rate": 2.0512820512820512e-05, |
|
"loss": 0.8058, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.2688, |
|
"grad_norm": 1.2836274496193323, |
|
"learning_rate": 2.153846153846154e-05, |
|
"loss": 0.7692, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.2816, |
|
"grad_norm": 1.3773948260948878, |
|
"learning_rate": 2.2564102564102566e-05, |
|
"loss": 0.798, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.2944, |
|
"grad_norm": 0.9779024320556644, |
|
"learning_rate": 2.3589743589743593e-05, |
|
"loss": 0.7898, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.3072, |
|
"grad_norm": 1.2856224063212611, |
|
"learning_rate": 2.461538461538462e-05, |
|
"loss": 0.7798, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.167576837058794, |
|
"learning_rate": 2.5641025641025646e-05, |
|
"loss": 0.7678, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.3328, |
|
"grad_norm": 1.1982452842844535, |
|
"learning_rate": 2.6666666666666667e-05, |
|
"loss": 0.7704, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.3456, |
|
"grad_norm": 1.4953948330166138, |
|
"learning_rate": 2.7692307692307694e-05, |
|
"loss": 0.7707, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.3584, |
|
"grad_norm": 1.0238727274473236, |
|
"learning_rate": 2.871794871794872e-05, |
|
"loss": 0.7768, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.3712, |
|
"grad_norm": 1.1534610250721853, |
|
"learning_rate": 2.9743589743589747e-05, |
|
"loss": 0.7743, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.384, |
|
"grad_norm": 0.9055874647009523, |
|
"learning_rate": 3.0769230769230774e-05, |
|
"loss": 0.7258, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.3968, |
|
"grad_norm": 1.3165475640221869, |
|
"learning_rate": 3.1794871794871795e-05, |
|
"loss": 0.7585, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.4096, |
|
"grad_norm": 0.9866053582540805, |
|
"learning_rate": 3.282051282051282e-05, |
|
"loss": 0.7522, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.4224, |
|
"grad_norm": 1.0543336745444751, |
|
"learning_rate": 3.384615384615385e-05, |
|
"loss": 0.7478, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.4352, |
|
"grad_norm": 1.0227139967912582, |
|
"learning_rate": 3.4871794871794875e-05, |
|
"loss": 0.7716, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.448, |
|
"grad_norm": 1.024288689780223, |
|
"learning_rate": 3.58974358974359e-05, |
|
"loss": 0.7303, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.4608, |
|
"grad_norm": 0.9339962453032254, |
|
"learning_rate": 3.692307692307693e-05, |
|
"loss": 0.7317, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.4736, |
|
"grad_norm": 1.0869869233699212, |
|
"learning_rate": 3.794871794871795e-05, |
|
"loss": 0.7695, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.4864, |
|
"grad_norm": 1.0873092741588706, |
|
"learning_rate": 3.8974358974358976e-05, |
|
"loss": 0.7452, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.4992, |
|
"grad_norm": 1.031333961155868, |
|
"learning_rate": 4e-05, |
|
"loss": 0.7233, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.512, |
|
"grad_norm": 1.0777106186591243, |
|
"learning_rate": 3.9999198907597046e-05, |
|
"loss": 0.7303, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.5248, |
|
"grad_norm": 1.012613449746541, |
|
"learning_rate": 3.9996795694563096e-05, |
|
"loss": 0.7525, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.5376, |
|
"grad_norm": 1.015287100824931, |
|
"learning_rate": 3.999279055341771e-05, |
|
"loss": 0.7431, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.5504, |
|
"grad_norm": 0.9547666584275336, |
|
"learning_rate": 3.998718380500971e-05, |
|
"loss": 0.7319, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.5632, |
|
"grad_norm": 1.125422016781764, |
|
"learning_rate": 3.997997589849145e-05, |
|
"loss": 0.7368, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.576, |
|
"grad_norm": 1.0370344242184166, |
|
"learning_rate": 3.9971167411282835e-05, |
|
"loss": 0.7734, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.5888, |
|
"grad_norm": 0.9362145505319984, |
|
"learning_rate": 3.99607590490251e-05, |
|
"loss": 0.7214, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.6016, |
|
"grad_norm": 0.8967046364088168, |
|
"learning_rate": 3.9948751645524235e-05, |
|
"loss": 0.713, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.6144, |
|
"grad_norm": 0.9460244206042299, |
|
"learning_rate": 3.9935146162684206e-05, |
|
"loss": 0.7246, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.6272, |
|
"grad_norm": 1.0085919950975422, |
|
"learning_rate": 3.9919943690429906e-05, |
|
"loss": 0.7273, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.8839138156298016, |
|
"learning_rate": 3.9903145446619837e-05, |
|
"loss": 0.7378, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.6528, |
|
"grad_norm": 0.8111663747625724, |
|
"learning_rate": 3.9884752776948564e-05, |
|
"loss": 0.7249, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.6656, |
|
"grad_norm": 1.0259051345509014, |
|
"learning_rate": 3.9864767154838864e-05, |
|
"loss": 0.7434, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.6784, |
|
"grad_norm": 0.8301599918624412, |
|
"learning_rate": 3.9843190181323744e-05, |
|
"loss": 0.7256, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.6912, |
|
"grad_norm": 0.9154124540951556, |
|
"learning_rate": 3.982002358491817e-05, |
|
"loss": 0.735, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.704, |
|
"grad_norm": 0.7513948251693194, |
|
"learning_rate": 3.979526922148058e-05, |
|
"loss": 0.7343, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.7168, |
|
"grad_norm": 0.8351370147873389, |
|
"learning_rate": 3.9768929074064206e-05, |
|
"loss": 0.7308, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.7296, |
|
"grad_norm": 0.7560411621872986, |
|
"learning_rate": 3.9741005252758255e-05, |
|
"loss": 0.6877, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.7424, |
|
"grad_norm": 0.8386013917599205, |
|
"learning_rate": 3.971149999451886e-05, |
|
"loss": 0.7273, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.7552, |
|
"grad_norm": 0.7909573435703522, |
|
"learning_rate": 3.9680415662989806e-05, |
|
"loss": 0.7151, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.768, |
|
"grad_norm": 1.048222420941934, |
|
"learning_rate": 3.9647754748313294e-05, |
|
"loss": 0.7068, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.7808, |
|
"grad_norm": 0.8656447604390025, |
|
"learning_rate": 3.96135198669304e-05, |
|
"loss": 0.7468, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.7936, |
|
"grad_norm": 0.9225813872947002, |
|
"learning_rate": 3.957771376137144e-05, |
|
"loss": 0.7259, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.8064, |
|
"grad_norm": 1.0576195176304044, |
|
"learning_rate": 3.954033930003634e-05, |
|
"loss": 0.7256, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.8192, |
|
"grad_norm": 0.9518126408204165, |
|
"learning_rate": 3.9501399476964806e-05, |
|
"loss": 0.6897, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.832, |
|
"grad_norm": 0.851324477460364, |
|
"learning_rate": 3.946089741159648e-05, |
|
"loss": 0.7522, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.8448, |
|
"grad_norm": 0.6716149073553244, |
|
"learning_rate": 3.9418836348521045e-05, |
|
"loss": 0.7013, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.8576, |
|
"grad_norm": 0.7482771712729901, |
|
"learning_rate": 3.937521965721831e-05, |
|
"loss": 0.7222, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.8704, |
|
"grad_norm": 0.6357576574196572, |
|
"learning_rate": 3.933005083178828e-05, |
|
"loss": 0.6922, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.8832, |
|
"grad_norm": 1.0644856785033119, |
|
"learning_rate": 3.928333349067125e-05, |
|
"loss": 0.723, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.896, |
|
"grad_norm": 0.6405157264180997, |
|
"learning_rate": 3.923507137635792e-05, |
|
"loss": 0.7169, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.9088, |
|
"grad_norm": 0.8419706953240476, |
|
"learning_rate": 3.9185268355089606e-05, |
|
"loss": 0.7358, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.9216, |
|
"grad_norm": 0.6542849794196405, |
|
"learning_rate": 3.913392841654851e-05, |
|
"loss": 0.7195, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.9344, |
|
"grad_norm": 0.8634302833607698, |
|
"learning_rate": 3.9081055673538093e-05, |
|
"loss": 0.7203, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.9472, |
|
"grad_norm": 0.6567808913482877, |
|
"learning_rate": 3.902665436165364e-05, |
|
"loss": 0.7118, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.7812778861933127, |
|
"learning_rate": 3.897072883894291e-05, |
|
"loss": 0.7201, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.9728, |
|
"grad_norm": 0.7326117314540337, |
|
"learning_rate": 3.8913283585557054e-05, |
|
"loss": 0.7116, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.9856, |
|
"grad_norm": 0.6834658442763747, |
|
"learning_rate": 3.885432320339167e-05, |
|
"loss": 0.6688, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.9984, |
|
"grad_norm": 0.7736960730883061, |
|
"learning_rate": 3.879385241571817e-05, |
|
"loss": 0.7422, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 1.0112, |
|
"grad_norm": 0.8791421881042536, |
|
"learning_rate": 3.873187606680543e-05, |
|
"loss": 0.6103, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 1.024, |
|
"grad_norm": 0.7675433065867431, |
|
"learning_rate": 3.866839912153168e-05, |
|
"loss": 0.5395, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 1.0368, |
|
"grad_norm": 0.9250645931902065, |
|
"learning_rate": 3.860342666498677e-05, |
|
"loss": 0.5512, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 1.0496, |
|
"grad_norm": 0.9096027266656481, |
|
"learning_rate": 3.853696390206484e-05, |
|
"loss": 0.5287, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 1.0624, |
|
"grad_norm": 0.7763008273177653, |
|
"learning_rate": 3.846901615704734e-05, |
|
"loss": 0.5327, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 1.0752, |
|
"grad_norm": 0.9327758899718949, |
|
"learning_rate": 3.839958887317649e-05, |
|
"loss": 0.5667, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 1.088, |
|
"grad_norm": 0.8127513945945763, |
|
"learning_rate": 3.832868761221926e-05, |
|
"loss": 0.5238, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 1.1008, |
|
"grad_norm": 0.7251658165407489, |
|
"learning_rate": 3.825631805402182e-05, |
|
"loss": 0.5025, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 1.1136, |
|
"grad_norm": 1.0593447852460571, |
|
"learning_rate": 3.818248599605448e-05, |
|
"loss": 0.5418, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 1.1264, |
|
"grad_norm": 0.7502126366792881, |
|
"learning_rate": 3.810719735294731e-05, |
|
"loss": 0.5436, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 1.1392, |
|
"grad_norm": 0.9025217984099986, |
|
"learning_rate": 3.8030458156016326e-05, |
|
"loss": 0.5632, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 1.152, |
|
"grad_norm": 0.7752528081018466, |
|
"learning_rate": 3.795227455278029e-05, |
|
"loss": 0.5698, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 1.1648, |
|
"grad_norm": 0.8801407549146449, |
|
"learning_rate": 3.787265280646825e-05, |
|
"loss": 0.5438, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 1.1776, |
|
"grad_norm": 0.7496356441331226, |
|
"learning_rate": 3.7791599295517825e-05, |
|
"loss": 0.5192, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 1.1904, |
|
"grad_norm": 0.8631634999670239, |
|
"learning_rate": 3.7709120513064196e-05, |
|
"loss": 0.5209, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 1.2032, |
|
"grad_norm": 0.8499904547567155, |
|
"learning_rate": 3.762522306641998e-05, |
|
"loss": 0.5391, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 1.216, |
|
"grad_norm": 0.8005857162187027, |
|
"learning_rate": 3.7539913676545874e-05, |
|
"loss": 0.5316, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 1.2288000000000001, |
|
"grad_norm": 0.7725253831065256, |
|
"learning_rate": 3.745319917751229e-05, |
|
"loss": 0.532, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 1.2416, |
|
"grad_norm": 0.8111181981066931, |
|
"learning_rate": 3.736508651595188e-05, |
|
"loss": 0.5233, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 1.2544, |
|
"grad_norm": 0.9389753171466569, |
|
"learning_rate": 3.727558275050301e-05, |
|
"loss": 0.5075, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 1.2671999999999999, |
|
"grad_norm": 0.644270337116294, |
|
"learning_rate": 3.718469505124434e-05, |
|
"loss": 0.4967, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.9122151730709385, |
|
"learning_rate": 3.709243069912041e-05, |
|
"loss": 0.5285, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 1.2928, |
|
"grad_norm": 0.8354629879773563, |
|
"learning_rate": 3.699879708535838e-05, |
|
"loss": 0.5268, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 1.3056, |
|
"grad_norm": 0.6812178009255956, |
|
"learning_rate": 3.69038017108759e-05, |
|
"loss": 0.5123, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 1.3184, |
|
"grad_norm": 0.7042512191630624, |
|
"learning_rate": 3.680745218568026e-05, |
|
"loss": 0.5129, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 1.3312, |
|
"grad_norm": 0.7299803805994697, |
|
"learning_rate": 3.6709756228258735e-05, |
|
"loss": 0.5343, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 1.3439999999999999, |
|
"grad_norm": 0.6871381008164456, |
|
"learning_rate": 3.6610721664960236e-05, |
|
"loss": 0.5059, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 1.3568, |
|
"grad_norm": 0.6771983066014872, |
|
"learning_rate": 3.65103564293684e-05, |
|
"loss": 0.5135, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 1.3696, |
|
"grad_norm": 0.6556281104458144, |
|
"learning_rate": 3.640866856166601e-05, |
|
"loss": 0.5435, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 1.3824, |
|
"grad_norm": 0.7112832435326318, |
|
"learning_rate": 3.6305666207990886e-05, |
|
"loss": 0.5285, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 1.3952, |
|
"grad_norm": 0.6998462790858951, |
|
"learning_rate": 3.6201357619783336e-05, |
|
"loss": 0.5258, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 1.408, |
|
"grad_norm": 0.6339926050593321, |
|
"learning_rate": 3.609575115312511e-05, |
|
"loss": 0.5046, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 1.4208, |
|
"grad_norm": 0.8460983667609926, |
|
"learning_rate": 3.598885526807003e-05, |
|
"loss": 0.5018, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 1.4336, |
|
"grad_norm": 0.6412627311526433, |
|
"learning_rate": 3.5880678527966224e-05, |
|
"loss": 0.548, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 1.4464000000000001, |
|
"grad_norm": 0.8449945191687394, |
|
"learning_rate": 3.577122959877017e-05, |
|
"loss": 0.4819, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 1.4592, |
|
"grad_norm": 0.7266016409490449, |
|
"learning_rate": 3.566051724835245e-05, |
|
"loss": 0.5382, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 1.472, |
|
"grad_norm": 0.8366889568978516, |
|
"learning_rate": 3.554855034579532e-05, |
|
"loss": 0.5364, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 1.4848, |
|
"grad_norm": 0.6935555814080242, |
|
"learning_rate": 3.5435337860682304e-05, |
|
"loss": 0.5231, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 1.4976, |
|
"grad_norm": 0.8605730069954024, |
|
"learning_rate": 3.532088886237956e-05, |
|
"loss": 0.4948, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 1.5104, |
|
"grad_norm": 0.6444476399667671, |
|
"learning_rate": 3.520521251930941e-05, |
|
"loss": 0.5073, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 1.5232, |
|
"grad_norm": 0.793199587534131, |
|
"learning_rate": 3.5088318098215805e-05, |
|
"loss": 0.5191, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 1.536, |
|
"grad_norm": 0.822697091265982, |
|
"learning_rate": 3.497021496342203e-05, |
|
"loss": 0.5037, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 1.5488, |
|
"grad_norm": 0.6553802937542286, |
|
"learning_rate": 3.485091257608047e-05, |
|
"loss": 0.524, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 1.5615999999999999, |
|
"grad_norm": 0.8048151807443502, |
|
"learning_rate": 3.473042049341474e-05, |
|
"loss": 0.4966, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 1.5744, |
|
"grad_norm": 0.6723206384765505, |
|
"learning_rate": 3.4608748367954064e-05, |
|
"loss": 0.5171, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 1.5872000000000002, |
|
"grad_norm": 0.686453936800869, |
|
"learning_rate": 3.4485905946759965e-05, |
|
"loss": 0.5169, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 0.6470781206840747, |
|
"learning_rate": 3.4361903070645484e-05, |
|
"loss": 0.5159, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 1.6128, |
|
"grad_norm": 0.6909611931420626, |
|
"learning_rate": 3.423674967338681e-05, |
|
"loss": 0.5097, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 1.6256, |
|
"grad_norm": 0.6666646530696388, |
|
"learning_rate": 3.411045578092754e-05, |
|
"loss": 0.5226, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 1.6383999999999999, |
|
"grad_norm": 0.6640563394076078, |
|
"learning_rate": 3.398303151057543e-05, |
|
"loss": 0.5604, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 1.6512, |
|
"grad_norm": 0.6512711394321913, |
|
"learning_rate": 3.385448707019199e-05, |
|
"loss": 0.5032, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 1.6640000000000001, |
|
"grad_norm": 0.6205754298591117, |
|
"learning_rate": 3.372483275737468e-05, |
|
"loss": 0.5266, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.6768, |
|
"grad_norm": 0.6072899721655831, |
|
"learning_rate": 3.359407895863199e-05, |
|
"loss": 0.5188, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 1.6896, |
|
"grad_norm": 0.6149441351391679, |
|
"learning_rate": 3.34622361485514e-05, |
|
"loss": 0.5357, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 1.7024, |
|
"grad_norm": 0.7240301884950392, |
|
"learning_rate": 3.332931488896029e-05, |
|
"loss": 0.5402, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 1.7151999999999998, |
|
"grad_norm": 0.6201070184569217, |
|
"learning_rate": 3.319532582807977e-05, |
|
"loss": 0.5015, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 1.728, |
|
"grad_norm": 0.7091061099254196, |
|
"learning_rate": 3.30602796996717e-05, |
|
"loss": 0.5408, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 1.7408000000000001, |
|
"grad_norm": 0.624733686451166, |
|
"learning_rate": 3.2924187322178865e-05, |
|
"loss": 0.5203, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 1.7536, |
|
"grad_norm": 0.5983194782889787, |
|
"learning_rate": 3.278705959785821e-05, |
|
"loss": 0.5119, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 1.7664, |
|
"grad_norm": 0.6052490460873315, |
|
"learning_rate": 3.2648907511907544e-05, |
|
"loss": 0.5244, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 1.7792, |
|
"grad_norm": 0.631071576498964, |
|
"learning_rate": 3.250974213158555e-05, |
|
"loss": 0.5078, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 1.792, |
|
"grad_norm": 0.6333833828469582, |
|
"learning_rate": 3.23695746053251e-05, |
|
"loss": 0.5029, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.8048, |
|
"grad_norm": 0.6765084985059329, |
|
"learning_rate": 3.222841616184025e-05, |
|
"loss": 0.5409, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 1.8176, |
|
"grad_norm": 0.6874631191014754, |
|
"learning_rate": 3.208627810922665e-05, |
|
"loss": 0.5275, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 1.8304, |
|
"grad_norm": 0.646534216918847, |
|
"learning_rate": 3.194317183405573e-05, |
|
"loss": 0.4959, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 1.8432, |
|
"grad_norm": 0.622434488996995, |
|
"learning_rate": 3.1799108800462466e-05, |
|
"loss": 0.5297, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 1.8559999999999999, |
|
"grad_norm": 0.6870636440715688, |
|
"learning_rate": 3.1654100549227024e-05, |
|
"loss": 0.5419, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 1.8688, |
|
"grad_norm": 0.7008181875957741, |
|
"learning_rate": 3.1508158696850275e-05, |
|
"loss": 0.5061, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 1.8816000000000002, |
|
"grad_norm": 0.6695818972080918, |
|
"learning_rate": 3.136129493462312e-05, |
|
"loss": 0.5212, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 1.8944, |
|
"grad_norm": 0.6249566453914562, |
|
"learning_rate": 3.121352102768998e-05, |
|
"loss": 0.5178, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.9072, |
|
"grad_norm": 0.6001095980182269, |
|
"learning_rate": 3.106484881410628e-05, |
|
"loss": 0.5055, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 0.6343835646364906, |
|
"learning_rate": 3.091529020389009e-05, |
|
"loss": 0.5164, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.9327999999999999, |
|
"grad_norm": 0.5818556366158101, |
|
"learning_rate": 3.076485717806808e-05, |
|
"loss": 0.5121, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 1.9456, |
|
"grad_norm": 0.6685596112450095, |
|
"learning_rate": 3.061356178771564e-05, |
|
"loss": 0.5255, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.9584000000000001, |
|
"grad_norm": 0.6398404450965353, |
|
"learning_rate": 3.0461416152991555e-05, |
|
"loss": 0.5476, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 1.9712, |
|
"grad_norm": 0.562220294385078, |
|
"learning_rate": 3.0308432462167045e-05, |
|
"loss": 0.5247, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 1.984, |
|
"grad_norm": 0.6575778041043532, |
|
"learning_rate": 3.015462297064936e-05, |
|
"loss": 0.5159, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 1.9968, |
|
"grad_norm": 0.5998901587688095, |
|
"learning_rate": 3.0000000000000004e-05, |
|
"loss": 0.5145, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 2.0096, |
|
"grad_norm": 0.8178696802765414, |
|
"learning_rate": 2.98445759369477e-05, |
|
"loss": 0.3839, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 2.0224, |
|
"grad_norm": 0.6287005638269078, |
|
"learning_rate": 2.9688363232396056e-05, |
|
"loss": 0.3389, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 2.0352, |
|
"grad_norm": 1.2342897436763312, |
|
"learning_rate": 2.9531374400426158e-05, |
|
"loss": 0.3668, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 2.048, |
|
"grad_norm": 0.8044570182081753, |
|
"learning_rate": 2.9373622017294075e-05, |
|
"loss": 0.3334, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 2.0608, |
|
"grad_norm": 0.733943874642229, |
|
"learning_rate": 2.9215118720423375e-05, |
|
"loss": 0.3329, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 2.0736, |
|
"grad_norm": 0.6995215819465724, |
|
"learning_rate": 2.9055877207392752e-05, |
|
"loss": 0.3387, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 2.0864, |
|
"grad_norm": 0.5957877635339981, |
|
"learning_rate": 2.8895910234918828e-05, |
|
"loss": 0.3182, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 2.0992, |
|
"grad_norm": 0.6911074879178541, |
|
"learning_rate": 2.873523061783426e-05, |
|
"loss": 0.3369, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 2.112, |
|
"grad_norm": 0.6148675957794936, |
|
"learning_rate": 2.8573851228061084e-05, |
|
"loss": 0.3162, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 2.1248, |
|
"grad_norm": 0.7359940694844278, |
|
"learning_rate": 2.8411784993579633e-05, |
|
"loss": 0.3371, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 2.1376, |
|
"grad_norm": 0.543831434662558, |
|
"learning_rate": 2.8249044897392814e-05, |
|
"loss": 0.3161, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 2.1504, |
|
"grad_norm": 0.7158141974018504, |
|
"learning_rate": 2.80856439764861e-05, |
|
"loss": 0.3483, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 2.1632, |
|
"grad_norm": 0.639906701994446, |
|
"learning_rate": 2.792159532078314e-05, |
|
"loss": 0.3294, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 2.176, |
|
"grad_norm": 0.654613598085333, |
|
"learning_rate": 2.77569120720971e-05, |
|
"loss": 0.3283, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 2.1888, |
|
"grad_norm": 0.5665845652546264, |
|
"learning_rate": 2.7591607423077932e-05, |
|
"loss": 0.319, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 2.2016, |
|
"grad_norm": 0.568410708254759, |
|
"learning_rate": 2.7425694616155474e-05, |
|
"loss": 0.3269, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 2.2144, |
|
"grad_norm": 0.5954621395011873, |
|
"learning_rate": 2.7259186942478656e-05, |
|
"loss": 0.3427, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 2.2272, |
|
"grad_norm": 0.5586111590619681, |
|
"learning_rate": 2.7092097740850712e-05, |
|
"loss": 0.3107, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 0.5376766169086076, |
|
"learning_rate": 2.692444039666066e-05, |
|
"loss": 0.3005, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 2.2528, |
|
"grad_norm": 0.528351139194909, |
|
"learning_rate": 2.6756228340810946e-05, |
|
"loss": 0.3126, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 2.2656, |
|
"grad_norm": 0.546595127743902, |
|
"learning_rate": 2.6587475048641596e-05, |
|
"loss": 0.3231, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 2.2784, |
|
"grad_norm": 0.5122337622446388, |
|
"learning_rate": 2.6418194038850634e-05, |
|
"loss": 0.323, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 2.2912, |
|
"grad_norm": 0.5263518084262206, |
|
"learning_rate": 2.624839887241115e-05, |
|
"loss": 0.3164, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 2.304, |
|
"grad_norm": 0.5430640335364739, |
|
"learning_rate": 2.607810315148494e-05, |
|
"loss": 0.3085, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 2.3168, |
|
"grad_norm": 0.5082204751148293, |
|
"learning_rate": 2.5907320518332827e-05, |
|
"loss": 0.3088, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 2.3296, |
|
"grad_norm": 0.541141604264627, |
|
"learning_rate": 2.5736064654221808e-05, |
|
"loss": 0.3067, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 2.3424, |
|
"grad_norm": 0.5233973076722868, |
|
"learning_rate": 2.5564349278329056e-05, |
|
"loss": 0.3234, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 2.3552, |
|
"grad_norm": 0.503039465860446, |
|
"learning_rate": 2.539218814664288e-05, |
|
"loss": 0.3473, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 2.368, |
|
"grad_norm": 0.5398717298058211, |
|
"learning_rate": 2.521959505086075e-05, |
|
"loss": 0.3188, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 2.3808, |
|
"grad_norm": 0.5278276809965626, |
|
"learning_rate": 2.5046583817284437e-05, |
|
"loss": 0.3357, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 2.3936, |
|
"grad_norm": 0.5384100052646946, |
|
"learning_rate": 2.487316830571244e-05, |
|
"loss": 0.3074, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 2.4064, |
|
"grad_norm": 0.5085981338026319, |
|
"learning_rate": 2.4699362408329646e-05, |
|
"loss": 0.3224, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 2.4192, |
|
"grad_norm": 0.5189683817102482, |
|
"learning_rate": 2.4525180048594452e-05, |
|
"loss": 0.3257, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 2.432, |
|
"grad_norm": 0.5569733322985922, |
|
"learning_rate": 2.435063518012335e-05, |
|
"loss": 0.333, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 2.4448, |
|
"grad_norm": 0.5033112925952238, |
|
"learning_rate": 2.4175741785573177e-05, |
|
"loss": 0.3125, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 2.4576000000000002, |
|
"grad_norm": 0.5241847984037186, |
|
"learning_rate": 2.4000513875520892e-05, |
|
"loss": 0.3014, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 2.4704, |
|
"grad_norm": 0.565705979756712, |
|
"learning_rate": 2.3824965487341247e-05, |
|
"loss": 0.3157, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 2.4832, |
|
"grad_norm": 0.5041792244538641, |
|
"learning_rate": 2.3649110684082258e-05, |
|
"loss": 0.3196, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 2.496, |
|
"grad_norm": 0.5516283301790813, |
|
"learning_rate": 2.3472963553338614e-05, |
|
"loss": 0.3076, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 2.5088, |
|
"grad_norm": 0.5439002732552327, |
|
"learning_rate": 2.3296538206123134e-05, |
|
"loss": 0.3295, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 2.5216, |
|
"grad_norm": 0.48448477685253244, |
|
"learning_rate": 2.311984877573636e-05, |
|
"loss": 0.3302, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 2.5343999999999998, |
|
"grad_norm": 0.5335115783744465, |
|
"learning_rate": 2.2942909416634326e-05, |
|
"loss": 0.3069, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 2.5472, |
|
"grad_norm": 0.47126856730409306, |
|
"learning_rate": 2.2765734303294666e-05, |
|
"loss": 0.3219, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 0.5261114215080503, |
|
"learning_rate": 2.2588337629081107e-05, |
|
"loss": 0.3171, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 2.5728, |
|
"grad_norm": 0.5218592685513823, |
|
"learning_rate": 2.2410733605106462e-05, |
|
"loss": 0.3285, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 2.5856, |
|
"grad_norm": 0.49004562394385515, |
|
"learning_rate": 2.2232936459094158e-05, |
|
"loss": 0.3363, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 2.5984, |
|
"grad_norm": 0.5702026561037369, |
|
"learning_rate": 2.205496043423849e-05, |
|
"loss": 0.322, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 2.6112, |
|
"grad_norm": 0.4794169168636671, |
|
"learning_rate": 2.1876819788063586e-05, |
|
"loss": 0.3193, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 2.624, |
|
"grad_norm": 0.4988347779020925, |
|
"learning_rate": 2.16985287912813e-05, |
|
"loss": 0.3165, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 2.6368, |
|
"grad_norm": 0.5093981810238166, |
|
"learning_rate": 2.1520101726647922e-05, |
|
"loss": 0.3399, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 2.6496, |
|
"grad_norm": 0.4721884611441708, |
|
"learning_rate": 2.1341552887820048e-05, |
|
"loss": 0.3229, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 2.6624, |
|
"grad_norm": 0.49887073567592627, |
|
"learning_rate": 2.1162896578209517e-05, |
|
"loss": 0.3175, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 2.6752000000000002, |
|
"grad_norm": 0.5045437946272339, |
|
"learning_rate": 2.0984147109837564e-05, |
|
"loss": 0.3005, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 2.6879999999999997, |
|
"grad_norm": 0.49903024953277764, |
|
"learning_rate": 2.0805318802188307e-05, |
|
"loss": 0.3474, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 2.7008, |
|
"grad_norm": 0.5314703321453352, |
|
"learning_rate": 2.0626425981061608e-05, |
|
"loss": 0.3122, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 2.7136, |
|
"grad_norm": 0.5092684458014048, |
|
"learning_rate": 2.0447482977425465e-05, |
|
"loss": 0.3233, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 2.7264, |
|
"grad_norm": 0.47352836758947026, |
|
"learning_rate": 2.0268504126267952e-05, |
|
"loss": 0.301, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 2.7392, |
|
"grad_norm": 0.4860543590228337, |
|
"learning_rate": 2.008950376544887e-05, |
|
"loss": 0.3151, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 2.752, |
|
"grad_norm": 0.5257123347462322, |
|
"learning_rate": 1.9910496234551132e-05, |
|
"loss": 0.3193, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 2.7648, |
|
"grad_norm": 0.4943979366346464, |
|
"learning_rate": 1.9731495873732055e-05, |
|
"loss": 0.2958, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 2.7776, |
|
"grad_norm": 0.44952496033451056, |
|
"learning_rate": 1.9552517022574542e-05, |
|
"loss": 0.3068, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 2.7904, |
|
"grad_norm": 0.4758452886198691, |
|
"learning_rate": 1.93735740189384e-05, |
|
"loss": 0.3148, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 2.8032, |
|
"grad_norm": 0.4682473906979366, |
|
"learning_rate": 1.9194681197811703e-05, |
|
"loss": 0.3706, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 2.816, |
|
"grad_norm": 0.4791242451892492, |
|
"learning_rate": 1.901585289016244e-05, |
|
"loss": 0.3288, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 2.8288, |
|
"grad_norm": 0.5085817724900927, |
|
"learning_rate": 1.8837103421790486e-05, |
|
"loss": 0.3044, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 2.8416, |
|
"grad_norm": 0.48647263186682693, |
|
"learning_rate": 1.8658447112179952e-05, |
|
"loss": 0.3289, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 2.8544, |
|
"grad_norm": 0.5046420474541058, |
|
"learning_rate": 1.8479898273352084e-05, |
|
"loss": 0.3205, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 2.8672, |
|
"grad_norm": 0.5586278799937978, |
|
"learning_rate": 1.83014712087187e-05, |
|
"loss": 0.3203, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"grad_norm": 0.46467805164179493, |
|
"learning_rate": 1.8123180211936417e-05, |
|
"loss": 0.313, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 2.8928000000000003, |
|
"grad_norm": 0.5133947729747058, |
|
"learning_rate": 1.794503956576152e-05, |
|
"loss": 0.3354, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 2.9055999999999997, |
|
"grad_norm": 0.4940637792212559, |
|
"learning_rate": 1.776706354090585e-05, |
|
"loss": 0.314, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 2.9184, |
|
"grad_norm": 0.46383168322908513, |
|
"learning_rate": 1.758926639489354e-05, |
|
"loss": 0.3496, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 2.9312, |
|
"grad_norm": 0.48213425867862153, |
|
"learning_rate": 1.7411662370918893e-05, |
|
"loss": 0.3217, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 2.944, |
|
"grad_norm": 0.4901278166547155, |
|
"learning_rate": 1.7234265696705344e-05, |
|
"loss": 0.3014, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 2.9568, |
|
"grad_norm": 0.4864530534987615, |
|
"learning_rate": 1.7057090583365678e-05, |
|
"loss": 0.3244, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 2.9696, |
|
"grad_norm": 0.4971270379983246, |
|
"learning_rate": 1.6880151224263646e-05, |
|
"loss": 0.2798, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 2.9824, |
|
"grad_norm": 0.48406441007747025, |
|
"learning_rate": 1.6703461793876876e-05, |
|
"loss": 0.3223, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 2.9952, |
|
"grad_norm": 0.4860868077756716, |
|
"learning_rate": 1.6527036446661396e-05, |
|
"loss": 0.3202, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 3.008, |
|
"grad_norm": 0.506416799803235, |
|
"learning_rate": 1.635088931591775e-05, |
|
"loss": 0.277, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 3.0208, |
|
"grad_norm": 0.6012281247858366, |
|
"learning_rate": 1.6175034512658753e-05, |
|
"loss": 0.2251, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 3.0336, |
|
"grad_norm": 0.3811428602824129, |
|
"learning_rate": 1.5999486124479115e-05, |
|
"loss": 0.2041, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 3.0464, |
|
"grad_norm": 0.5784667293009617, |
|
"learning_rate": 1.5824258214426833e-05, |
|
"loss": 0.2162, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 3.0592, |
|
"grad_norm": 0.705302440808711, |
|
"learning_rate": 1.5649364819876655e-05, |
|
"loss": 0.1914, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 3.072, |
|
"grad_norm": 0.4942964544263069, |
|
"learning_rate": 1.547481995140556e-05, |
|
"loss": 0.2047, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 3.0848, |
|
"grad_norm": 0.4421046816159049, |
|
"learning_rate": 1.5300637591670357e-05, |
|
"loss": 0.2131, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 3.0976, |
|
"grad_norm": 0.5079935773124865, |
|
"learning_rate": 1.5126831694287564e-05, |
|
"loss": 0.1727, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 3.1104, |
|
"grad_norm": 0.4901753704796098, |
|
"learning_rate": 1.4953416182715566e-05, |
|
"loss": 0.1913, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 3.1232, |
|
"grad_norm": 0.42921694755148015, |
|
"learning_rate": 1.478040494913926e-05, |
|
"loss": 0.1899, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 3.136, |
|
"grad_norm": 0.4176592012336345, |
|
"learning_rate": 1.460781185335713e-05, |
|
"loss": 0.2108, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 3.1488, |
|
"grad_norm": 0.4555936330018958, |
|
"learning_rate": 1.443565072167095e-05, |
|
"loss": 0.1944, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 3.1616, |
|
"grad_norm": 0.439819633982298, |
|
"learning_rate": 1.4263935345778202e-05, |
|
"loss": 0.1793, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 3.1744, |
|
"grad_norm": 0.4507068322369287, |
|
"learning_rate": 1.409267948166718e-05, |
|
"loss": 0.1987, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 3.1872, |
|
"grad_norm": 0.4267563221021692, |
|
"learning_rate": 1.3921896848515064e-05, |
|
"loss": 0.1786, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"grad_norm": 0.3876171615365498, |
|
"learning_rate": 1.3751601127588849e-05, |
|
"loss": 0.1966, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 3.2128, |
|
"grad_norm": 0.4057833864281153, |
|
"learning_rate": 1.3581805961149371e-05, |
|
"loss": 0.1958, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 3.2256, |
|
"grad_norm": 0.4087899210587149, |
|
"learning_rate": 1.341252495135841e-05, |
|
"loss": 0.1852, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 3.2384, |
|
"grad_norm": 0.39407281907345965, |
|
"learning_rate": 1.324377165918906e-05, |
|
"loss": 0.1827, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 3.2512, |
|
"grad_norm": 0.35935266163921825, |
|
"learning_rate": 1.3075559603339354e-05, |
|
"loss": 0.1989, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 3.2640000000000002, |
|
"grad_norm": 0.37536457589473, |
|
"learning_rate": 1.2907902259149287e-05, |
|
"loss": 0.2041, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 3.2768, |
|
"grad_norm": 0.39428821774784384, |
|
"learning_rate": 1.274081305752135e-05, |
|
"loss": 0.196, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 3.2896, |
|
"grad_norm": 0.4006580445730096, |
|
"learning_rate": 1.2574305383844528e-05, |
|
"loss": 0.181, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 3.3024, |
|
"grad_norm": 0.40910842845823364, |
|
"learning_rate": 1.2408392576922075e-05, |
|
"loss": 0.207, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 3.3152, |
|
"grad_norm": 0.39974974632699173, |
|
"learning_rate": 1.2243087927902905e-05, |
|
"loss": 0.2047, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 3.328, |
|
"grad_norm": 0.38363020981213264, |
|
"learning_rate": 1.2078404679216864e-05, |
|
"loss": 0.1784, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 3.3407999999999998, |
|
"grad_norm": 0.3854610036805951, |
|
"learning_rate": 1.1914356023513904e-05, |
|
"loss": 0.2061, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 3.3536, |
|
"grad_norm": 0.4164002914164664, |
|
"learning_rate": 1.1750955102607193e-05, |
|
"loss": 0.2037, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 3.3664, |
|
"grad_norm": 0.3602169675364507, |
|
"learning_rate": 1.1588215006420374e-05, |
|
"loss": 0.1904, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 3.3792, |
|
"grad_norm": 0.3645159691692519, |
|
"learning_rate": 1.1426148771938915e-05, |
|
"loss": 0.1852, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 3.392, |
|
"grad_norm": 0.4029660604014823, |
|
"learning_rate": 1.1264769382165748e-05, |
|
"loss": 0.214, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 3.4048, |
|
"grad_norm": 0.38159597795606703, |
|
"learning_rate": 1.110408976508118e-05, |
|
"loss": 0.189, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 3.4176, |
|
"grad_norm": 0.38275208914530784, |
|
"learning_rate": 1.094412279260726e-05, |
|
"loss": 0.2009, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 3.4304, |
|
"grad_norm": 0.3723632786915106, |
|
"learning_rate": 1.0784881279576635e-05, |
|
"loss": 0.1869, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 3.4432, |
|
"grad_norm": 0.3678856931649929, |
|
"learning_rate": 1.0626377982705929e-05, |
|
"loss": 0.2174, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 3.456, |
|
"grad_norm": 0.3502192302775391, |
|
"learning_rate": 1.0468625599573842e-05, |
|
"loss": 0.1934, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 3.4688, |
|
"grad_norm": 0.3598509379361589, |
|
"learning_rate": 1.0311636767603952e-05, |
|
"loss": 0.2024, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 3.4816, |
|
"grad_norm": 0.3837829509093238, |
|
"learning_rate": 1.0155424063052306e-05, |
|
"loss": 0.1778, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 3.4944, |
|
"grad_norm": 0.37321603082177557, |
|
"learning_rate": 1.0000000000000006e-05, |
|
"loss": 0.2028, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 3.5072, |
|
"grad_norm": 0.3646624135896105, |
|
"learning_rate": 9.84537702935065e-06, |
|
"loss": 0.1795, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"grad_norm": 0.3619279737435615, |
|
"learning_rate": 9.691567537832964e-06, |
|
"loss": 0.1959, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 3.5328, |
|
"grad_norm": 0.4045915856656408, |
|
"learning_rate": 9.538583847008452e-06, |
|
"loss": 0.2002, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 3.5456, |
|
"grad_norm": 0.36699856081232585, |
|
"learning_rate": 9.386438212284372e-06, |
|
"loss": 0.1768, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 3.5584, |
|
"grad_norm": 0.3593940759056696, |
|
"learning_rate": 9.235142821931928e-06, |
|
"loss": 0.1831, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 3.5712, |
|
"grad_norm": 0.38036589475024113, |
|
"learning_rate": 9.084709796109907e-06, |
|
"loss": 0.1947, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 3.584, |
|
"grad_norm": 0.3854399777626176, |
|
"learning_rate": 8.93515118589373e-06, |
|
"loss": 0.1927, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 3.5968, |
|
"grad_norm": 0.35303194527568804, |
|
"learning_rate": 8.786478972310023e-06, |
|
"loss": 0.184, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 3.6096, |
|
"grad_norm": 0.3484026536415648, |
|
"learning_rate": 8.638705065376887e-06, |
|
"loss": 0.1848, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 3.6224, |
|
"grad_norm": 0.35254870824694956, |
|
"learning_rate": 8.491841303149728e-06, |
|
"loss": 0.2174, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 3.6352, |
|
"grad_norm": 0.35010223875661445, |
|
"learning_rate": 8.345899450772975e-06, |
|
"loss": 0.1808, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 3.648, |
|
"grad_norm": 0.33589842070128134, |
|
"learning_rate": 8.200891199537549e-06, |
|
"loss": 0.1849, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 3.6608, |
|
"grad_norm": 0.3417155292449215, |
|
"learning_rate": 8.056828165944282e-06, |
|
"loss": 0.1932, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 3.6736, |
|
"grad_norm": 0.337080699159681, |
|
"learning_rate": 7.913721890773354e-06, |
|
"loss": 0.1792, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 3.6864, |
|
"grad_norm": 0.35890218507105415, |
|
"learning_rate": 7.771583838159756e-06, |
|
"loss": 0.2147, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 3.6992000000000003, |
|
"grad_norm": 0.36129251338506446, |
|
"learning_rate": 7.630425394674903e-06, |
|
"loss": 0.2, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 3.7119999999999997, |
|
"grad_norm": 0.3959496367146204, |
|
"learning_rate": 7.49025786841445e-06, |
|
"loss": 0.2047, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 3.7248, |
|
"grad_norm": 0.3804537691867654, |
|
"learning_rate": 7.3510924880924575e-06, |
|
"loss": 0.2011, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 3.7376, |
|
"grad_norm": 0.3567951138023965, |
|
"learning_rate": 7.212940402141808e-06, |
|
"loss": 0.1977, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 3.7504, |
|
"grad_norm": 0.34862334322445726, |
|
"learning_rate": 7.075812677821145e-06, |
|
"loss": 0.1472, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 3.7632, |
|
"grad_norm": 0.3444326428697489, |
|
"learning_rate": 6.939720300328303e-06, |
|
"loss": 0.2068, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 3.776, |
|
"grad_norm": 0.34315467198982963, |
|
"learning_rate": 6.8046741719202385e-06, |
|
"loss": 0.1801, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 3.7888, |
|
"grad_norm": 0.36053148046849726, |
|
"learning_rate": 6.67068511103971e-06, |
|
"loss": 0.1797, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 3.8016, |
|
"grad_norm": 0.3446001058185602, |
|
"learning_rate": 6.537763851448593e-06, |
|
"loss": 0.1782, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 3.8144, |
|
"grad_norm": 0.3572721398571936, |
|
"learning_rate": 6.4059210413680175e-06, |
|
"loss": 0.1722, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 3.8272, |
|
"grad_norm": 0.3524926329338016, |
|
"learning_rate": 6.275167242625331e-06, |
|
"loss": 0.1868, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"grad_norm": 0.3392133753693257, |
|
"learning_rate": 6.145512929808013e-06, |
|
"loss": 0.1923, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 3.8528000000000002, |
|
"grad_norm": 0.37883269375786016, |
|
"learning_rate": 6.016968489424572e-06, |
|
"loss": 0.1846, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 3.8656, |
|
"grad_norm": 0.35621682181716785, |
|
"learning_rate": 5.889544219072465e-06, |
|
"loss": 0.1991, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 3.8784, |
|
"grad_norm": 0.34699611669774555, |
|
"learning_rate": 5.7632503266131925e-06, |
|
"loss": 0.1756, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 3.8912, |
|
"grad_norm": 0.331837247579926, |
|
"learning_rate": 5.638096929354522e-06, |
|
"loss": 0.2133, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 3.904, |
|
"grad_norm": 0.3358679464482438, |
|
"learning_rate": 5.514094053240035e-06, |
|
"loss": 0.2214, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 3.9168, |
|
"grad_norm": 0.35504197065657817, |
|
"learning_rate": 5.39125163204594e-06, |
|
"loss": 0.1761, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 3.9295999999999998, |
|
"grad_norm": 0.3311346372703272, |
|
"learning_rate": 5.269579506585259e-06, |
|
"loss": 0.198, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 3.9424, |
|
"grad_norm": 0.36682363964880216, |
|
"learning_rate": 5.149087423919541e-06, |
|
"loss": 0.187, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 3.9552, |
|
"grad_norm": 0.32597937356367246, |
|
"learning_rate": 5.029785036577976e-06, |
|
"loss": 0.2129, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 3.968, |
|
"grad_norm": 0.3380380052817791, |
|
"learning_rate": 4.911681901784198e-06, |
|
"loss": 0.2023, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 3.9808, |
|
"grad_norm": 0.33938858828486923, |
|
"learning_rate": 4.794787480690597e-06, |
|
"loss": 0.1951, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 3.9936, |
|
"grad_norm": 0.3488126726253992, |
|
"learning_rate": 4.679111137620442e-06, |
|
"loss": 0.1892, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 4.0064, |
|
"grad_norm": 0.3487996176824251, |
|
"learning_rate": 4.5646621393177e-06, |
|
"loss": 0.1691, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 4.0192, |
|
"grad_norm": 0.3736447589429214, |
|
"learning_rate": 4.451449654204685e-06, |
|
"loss": 0.144, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 4.032, |
|
"grad_norm": 0.3384671060788562, |
|
"learning_rate": 4.339482751647557e-06, |
|
"loss": 0.1548, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 4.0448, |
|
"grad_norm": 0.29592263318995493, |
|
"learning_rate": 4.228770401229824e-06, |
|
"loss": 0.1324, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 4.0576, |
|
"grad_norm": 0.25723587257926206, |
|
"learning_rate": 4.119321472033779e-06, |
|
"loss": 0.1354, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 4.0704, |
|
"grad_norm": 0.29994660788305383, |
|
"learning_rate": 4.011144731929981e-06, |
|
"loss": 0.1374, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 4.0832, |
|
"grad_norm": 0.34946760400650806, |
|
"learning_rate": 3.904248846874894e-06, |
|
"loss": 0.1503, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 4.096, |
|
"grad_norm": 0.38995283037498696, |
|
"learning_rate": 3.7986423802166705e-06, |
|
"loss": 0.1595, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 4.1088, |
|
"grad_norm": 0.366017265083761, |
|
"learning_rate": 3.694333792009115e-06, |
|
"loss": 0.1502, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 4.1216, |
|
"grad_norm": 0.36389466953807864, |
|
"learning_rate": 3.5913314383339937e-06, |
|
"loss": 0.15, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 4.1344, |
|
"grad_norm": 0.3170091114058245, |
|
"learning_rate": 3.4896435706316e-06, |
|
"loss": 0.1404, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 4.1472, |
|
"grad_norm": 0.26593367619599734, |
|
"learning_rate": 3.3892783350397675e-06, |
|
"loss": 0.1187, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"grad_norm": 0.2718128922765933, |
|
"learning_rate": 3.290243771741275e-06, |
|
"loss": 0.1432, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 4.1728, |
|
"grad_norm": 0.2954806585268095, |
|
"learning_rate": 3.1925478143197418e-06, |
|
"loss": 0.1103, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 4.1856, |
|
"grad_norm": 0.2814929538727662, |
|
"learning_rate": 3.0961982891241083e-06, |
|
"loss": 0.157, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 4.1984, |
|
"grad_norm": 0.2696282300167148, |
|
"learning_rate": 3.001202914641628e-06, |
|
"loss": 0.1246, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 4.2112, |
|
"grad_norm": 0.26906920340312424, |
|
"learning_rate": 2.907569300879596e-06, |
|
"loss": 0.1198, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 4.224, |
|
"grad_norm": 0.24892417900423874, |
|
"learning_rate": 2.815304948755664e-06, |
|
"loss": 0.1283, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 4.2368, |
|
"grad_norm": 0.27222961061443396, |
|
"learning_rate": 2.7244172494969978e-06, |
|
"loss": 0.1648, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 4.2496, |
|
"grad_norm": 0.23693650059319968, |
|
"learning_rate": 2.6349134840481294e-06, |
|
"loss": 0.1231, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 4.2624, |
|
"grad_norm": 0.24598374781410315, |
|
"learning_rate": 2.546800822487714e-06, |
|
"loss": 0.1239, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 4.2752, |
|
"grad_norm": 0.26142026108083005, |
|
"learning_rate": 2.4600863234541338e-06, |
|
"loss": 0.1518, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 4.288, |
|
"grad_norm": 0.25507881255355563, |
|
"learning_rate": 2.374776933580025e-06, |
|
"loss": 0.1558, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 4.3008, |
|
"grad_norm": 0.2637840647987734, |
|
"learning_rate": 2.2908794869358044e-06, |
|
"loss": 0.1419, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 4.3136, |
|
"grad_norm": 0.2628127561012397, |
|
"learning_rate": 2.2084007044821764e-06, |
|
"loss": 0.1175, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 4.3264, |
|
"grad_norm": 0.2679837646112785, |
|
"learning_rate": 2.127347193531757e-06, |
|
"loss": 0.1366, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 4.3392, |
|
"grad_norm": 0.2632869027854267, |
|
"learning_rate": 2.0477254472197237e-06, |
|
"loss": 0.133, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 4.352, |
|
"grad_norm": 0.2574616441403653, |
|
"learning_rate": 1.96954184398368e-06, |
|
"loss": 0.1473, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 4.3648, |
|
"grad_norm": 0.2567180790770113, |
|
"learning_rate": 1.8928026470526917e-06, |
|
"loss": 0.1529, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 4.3776, |
|
"grad_norm": 0.25759651968280495, |
|
"learning_rate": 1.817514003945524e-06, |
|
"loss": 0.1389, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 4.3904, |
|
"grad_norm": 0.2694492179857689, |
|
"learning_rate": 1.743681945978184e-06, |
|
"loss": 0.1676, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 4.4032, |
|
"grad_norm": 0.2542976978117736, |
|
"learning_rate": 1.6713123877807413e-06, |
|
"loss": 0.1264, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 4.416, |
|
"grad_norm": 0.2587661015213375, |
|
"learning_rate": 1.6004111268235156e-06, |
|
"loss": 0.1754, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 4.4288, |
|
"grad_norm": 0.2577619521286932, |
|
"learning_rate": 1.5309838429526714e-06, |
|
"loss": 0.1308, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 4.4416, |
|
"grad_norm": 0.2482204520963082, |
|
"learning_rate": 1.4630360979351644e-06, |
|
"loss": 0.1363, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 4.4544, |
|
"grad_norm": 0.26815500281534765, |
|
"learning_rate": 1.396573335013236e-06, |
|
"loss": 0.1551, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 4.4672, |
|
"grad_norm": 0.25071743077202685, |
|
"learning_rate": 1.3316008784683265e-06, |
|
"loss": 0.1564, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"grad_norm": 0.2527811681338482, |
|
"learning_rate": 1.2681239331945695e-06, |
|
"loss": 0.1429, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 4.4928, |
|
"grad_norm": 0.2472715666298151, |
|
"learning_rate": 1.2061475842818337e-06, |
|
"loss": 0.1276, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 4.5056, |
|
"grad_norm": 0.2658156327118546, |
|
"learning_rate": 1.1456767966083393e-06, |
|
"loss": 0.141, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 4.5184, |
|
"grad_norm": 0.245573731147634, |
|
"learning_rate": 1.086716414442952e-06, |
|
"loss": 0.1394, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 4.5312, |
|
"grad_norm": 0.2426493496398299, |
|
"learning_rate": 1.0292711610570904e-06, |
|
"loss": 0.1603, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 4.5440000000000005, |
|
"grad_norm": 0.24716411463307858, |
|
"learning_rate": 9.733456383463658e-07, |
|
"loss": 0.1437, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 4.5568, |
|
"grad_norm": 0.2487733898003392, |
|
"learning_rate": 9.189443264619102e-07, |
|
"loss": 0.1317, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 4.5696, |
|
"grad_norm": 0.2316370048474447, |
|
"learning_rate": 8.660715834514977e-07, |
|
"loss": 0.1242, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 4.5824, |
|
"grad_norm": 0.2448921595410395, |
|
"learning_rate": 8.147316449103959e-07, |
|
"loss": 0.1468, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 4.5952, |
|
"grad_norm": 0.23495875452719822, |
|
"learning_rate": 7.649286236420806e-07, |
|
"loss": 0.1215, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 4.608, |
|
"grad_norm": 0.24476071444081385, |
|
"learning_rate": 7.166665093287539e-07, |
|
"loss": 0.1299, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 4.6208, |
|
"grad_norm": 0.25342925971274427, |
|
"learning_rate": 6.69949168211721e-07, |
|
"loss": 0.1381, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 4.6336, |
|
"grad_norm": 0.24807086311102455, |
|
"learning_rate": 6.247803427816945e-07, |
|
"loss": 0.1371, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 4.6464, |
|
"grad_norm": 0.24989429343702707, |
|
"learning_rate": 5.811636514789598e-07, |
|
"loss": 0.1551, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 4.6592, |
|
"grad_norm": 0.2478877254023866, |
|
"learning_rate": 5.391025884035239e-07, |
|
"loss": 0.1214, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 4.672, |
|
"grad_norm": 0.24910977380194788, |
|
"learning_rate": 4.986005230351954e-07, |
|
"loss": 0.1266, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 4.6848, |
|
"grad_norm": 0.25184436317148107, |
|
"learning_rate": 4.5966069996365993e-07, |
|
"loss": 0.1231, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 4.6975999999999996, |
|
"grad_norm": 0.26081502123563133, |
|
"learning_rate": 4.22286238628562e-07, |
|
"loss": 0.1411, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 4.7104, |
|
"grad_norm": 0.253071429048111, |
|
"learning_rate": 3.8648013306960664e-07, |
|
"loss": 0.1375, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 4.7232, |
|
"grad_norm": 0.2540425002906153, |
|
"learning_rate": 3.522452516867048e-07, |
|
"loss": 0.136, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 4.736, |
|
"grad_norm": 0.2574206686510939, |
|
"learning_rate": 3.1958433701019697e-07, |
|
"loss": 0.1307, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 4.7488, |
|
"grad_norm": 0.2609227567583143, |
|
"learning_rate": 2.8850000548115155e-07, |
|
"loss": 0.1346, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 4.7616, |
|
"grad_norm": 0.254691465480472, |
|
"learning_rate": 2.5899474724174313e-07, |
|
"loss": 0.158, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 4.7744, |
|
"grad_norm": 0.2581011350666695, |
|
"learning_rate": 2.3107092593579905e-07, |
|
"loss": 0.1263, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 4.7872, |
|
"grad_norm": 0.24198525732995088, |
|
"learning_rate": 2.0473077851942858e-07, |
|
"loss": 0.1193, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"grad_norm": 0.25164331206812923, |
|
"learning_rate": 1.799764150818306e-07, |
|
"loss": 0.1266, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 4.8128, |
|
"grad_norm": 0.2492077783545167, |
|
"learning_rate": 1.5680981867625566e-07, |
|
"loss": 0.137, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 4.8256, |
|
"grad_norm": 0.2436272770874148, |
|
"learning_rate": 1.3523284516113955e-07, |
|
"loss": 0.1353, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 4.8384, |
|
"grad_norm": 0.2502879339931626, |
|
"learning_rate": 1.1524722305144231e-07, |
|
"loss": 0.1159, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 4.8512, |
|
"grad_norm": 0.2442634210698441, |
|
"learning_rate": 9.685455338016347e-08, |
|
"loss": 0.1375, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 4.864, |
|
"grad_norm": 0.24280458947882716, |
|
"learning_rate": 8.005630957010014e-08, |
|
"loss": 0.1151, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 4.8768, |
|
"grad_norm": 0.2558531188851725, |
|
"learning_rate": 6.485383731580142e-08, |
|
"loss": 0.1403, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 4.8896, |
|
"grad_norm": 0.25245664987849675, |
|
"learning_rate": 5.1248354475768034e-08, |
|
"loss": 0.11, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 4.9024, |
|
"grad_norm": 0.2545467999410838, |
|
"learning_rate": 3.924095097489922e-08, |
|
"loss": 0.147, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 4.9152000000000005, |
|
"grad_norm": 0.23852493560647273, |
|
"learning_rate": 2.8832588717164766e-08, |
|
"loss": 0.133, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 4.928, |
|
"grad_norm": 0.2578644326341971, |
|
"learning_rate": 2.0024101508555604e-08, |
|
"loss": 0.1375, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 4.9408, |
|
"grad_norm": 0.24082567476482564, |
|
"learning_rate": 1.281619499029274e-08, |
|
"loss": 0.1476, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 4.9536, |
|
"grad_norm": 0.2581628432525251, |
|
"learning_rate": 7.209446582292501e-09, |
|
"loss": 0.1213, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 4.9664, |
|
"grad_norm": 0.25077241087237473, |
|
"learning_rate": 3.2043054369057523e-09, |
|
"loss": 0.1705, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 4.9792, |
|
"grad_norm": 0.2294943734104996, |
|
"learning_rate": 8.010924029533406e-10, |
|
"loss": 0.1098, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 4.992, |
|
"grad_norm": 0.25551200611550673, |
|
"learning_rate": 0.0, |
|
"loss": 0.1474, |
|
"step": 390 |
|
}, |
|
    {
      "epoch": 4.992,
      "step": 390,
      "total_flos": 5.514668037678039e+17,
      "train_loss": 0.39462805023560155,
      "train_runtime": 20897.0448,
      "train_samples_per_second": 2.393,
      "train_steps_per_second": 0.019
    }
  ],
  "logging_steps": 1,
  "max_steps": 390,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 5.514668037678039e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}