{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 4.992,
"eval_steps": 500,
"global_step": 390,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
|
{ |
|
"epoch": 0.0128, |
|
"grad_norm": 6.917312863904752, |
|
"learning_rate": 1.0256410256410257e-06, |
|
"loss": 0.8778, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0256, |
|
"grad_norm": 7.115675932218564, |
|
"learning_rate": 2.0512820512820513e-06, |
|
"loss": 0.8978, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0384, |
|
"grad_norm": 7.143090959616597, |
|
"learning_rate": 3.0769230769230774e-06, |
|
"loss": 0.8836, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0512, |
|
"grad_norm": 6.547261440554129, |
|
"learning_rate": 4.102564102564103e-06, |
|
"loss": 0.8536, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.064, |
|
"grad_norm": 5.080727646487042, |
|
"learning_rate": 5.128205128205128e-06, |
|
"loss": 0.8178, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0768, |
|
"grad_norm": 2.9238509462148383, |
|
"learning_rate": 6.153846153846155e-06, |
|
"loss": 0.7305, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.0896, |
|
"grad_norm": 2.1194750359479344, |
|
"learning_rate": 7.17948717948718e-06, |
|
"loss": 0.7348, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.1024, |
|
"grad_norm": 3.762748319278985, |
|
"learning_rate": 8.205128205128205e-06, |
|
"loss": 0.7398, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.1152, |
|
"grad_norm": 3.939983995799894, |
|
"learning_rate": 9.230769230769232e-06, |
|
"loss": 0.6939, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.128, |
|
"grad_norm": 4.4957667148675045, |
|
"learning_rate": 1.0256410256410256e-05, |
|
"loss": 0.6766, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.1408, |
|
"grad_norm": 4.466339084180279, |
|
"learning_rate": 1.1282051282051283e-05, |
|
"loss": 0.6567, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.1536, |
|
"grad_norm": 3.1065964129262578, |
|
"learning_rate": 1.230769230769231e-05, |
|
"loss": 0.6419, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.1664, |
|
"grad_norm": 2.129844525633678, |
|
"learning_rate": 1.3333333333333333e-05, |
|
"loss": 0.6066, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.1792, |
|
"grad_norm": 2.6341650255331666, |
|
"learning_rate": 1.435897435897436e-05, |
|
"loss": 0.5788, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.192, |
|
"grad_norm": 2.4851280434276903, |
|
"learning_rate": 1.5384615384615387e-05, |
|
"loss": 0.5903, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.2048, |
|
"grad_norm": 1.7306485929486228, |
|
"learning_rate": 1.641025641025641e-05, |
|
"loss": 0.5908, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.2176, |
|
"grad_norm": 1.1989593380058583, |
|
"learning_rate": 1.7435897435897438e-05, |
|
"loss": 0.5816, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.2304, |
|
"grad_norm": 1.5709794458589184, |
|
"learning_rate": 1.8461538461538465e-05, |
|
"loss": 0.5451, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.2432, |
|
"grad_norm": 1.2712863753195993, |
|
"learning_rate": 1.9487179487179488e-05, |
|
"loss": 0.5311, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.256, |
|
"grad_norm": 0.7593234560614043, |
|
"learning_rate": 2.0512820512820512e-05, |
|
"loss": 0.5728, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.2688, |
|
"grad_norm": 1.0229247297773885, |
|
"learning_rate": 2.153846153846154e-05, |
|
"loss": 0.5259, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.2816, |
|
"grad_norm": 1.0575624137381636, |
|
"learning_rate": 2.2564102564102566e-05, |
|
"loss": 0.536, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.2944, |
|
"grad_norm": 0.7079152316748596, |
|
"learning_rate": 2.3589743589743593e-05, |
|
"loss": 0.5312, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.3072, |
|
"grad_norm": 0.9714120507188312, |
|
"learning_rate": 2.461538461538462e-05, |
|
"loss": 0.5134, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.7949976651512598, |
|
"learning_rate": 2.5641025641025646e-05, |
|
"loss": 0.5191, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.3328, |
|
"grad_norm": 0.9651032085990672, |
|
"learning_rate": 2.6666666666666667e-05, |
|
"loss": 0.521, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.3456, |
|
"grad_norm": 0.8248971552417219, |
|
"learning_rate": 2.7692307692307694e-05, |
|
"loss": 0.5346, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.3584, |
|
"grad_norm": 0.6876941086023203, |
|
"learning_rate": 2.871794871794872e-05, |
|
"loss": 0.5197, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.3712, |
|
"grad_norm": 0.7726526234427976, |
|
"learning_rate": 2.9743589743589747e-05, |
|
"loss": 0.5334, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.384, |
|
"grad_norm": 0.8622968341305126, |
|
"learning_rate": 3.0769230769230774e-05, |
|
"loss": 0.5174, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.3968, |
|
"grad_norm": 0.7178794371288015, |
|
"learning_rate": 3.1794871794871795e-05, |
|
"loss": 0.4888, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.4096, |
|
"grad_norm": 0.6291964033066837, |
|
"learning_rate": 3.282051282051282e-05, |
|
"loss": 0.5014, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.4224, |
|
"grad_norm": 0.6304774189343574, |
|
"learning_rate": 3.384615384615385e-05, |
|
"loss": 0.4776, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.4352, |
|
"grad_norm": 0.6509761608633534, |
|
"learning_rate": 3.4871794871794875e-05, |
|
"loss": 0.4812, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.448, |
|
"grad_norm": 0.7024262808757761, |
|
"learning_rate": 3.58974358974359e-05, |
|
"loss": 0.4929, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.4608, |
|
"grad_norm": 0.7469272638066381, |
|
"learning_rate": 3.692307692307693e-05, |
|
"loss": 0.515, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.4736, |
|
"grad_norm": 0.7063033600086017, |
|
"learning_rate": 3.794871794871795e-05, |
|
"loss": 0.5154, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.4864, |
|
"grad_norm": 0.6891954146829927, |
|
"learning_rate": 3.8974358974358976e-05, |
|
"loss": 0.5247, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.4992, |
|
"grad_norm": 0.729694958902066, |
|
"learning_rate": 4e-05, |
|
"loss": 0.4879, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.512, |
|
"grad_norm": 0.6293498574203988, |
|
"learning_rate": 3.9999198907597046e-05, |
|
"loss": 0.5072, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.5248, |
|
"grad_norm": 0.7067898835951391, |
|
"learning_rate": 3.9996795694563096e-05, |
|
"loss": 0.4859, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.5376, |
|
"grad_norm": 0.6792215516455709, |
|
"learning_rate": 3.999279055341771e-05, |
|
"loss": 0.4762, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.5504, |
|
"grad_norm": 0.7112534998939477, |
|
"learning_rate": 3.998718380500971e-05, |
|
"loss": 0.4966, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.5632, |
|
"grad_norm": 0.8917671882383743, |
|
"learning_rate": 3.997997589849145e-05, |
|
"loss": 0.4879, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.576, |
|
"grad_norm": 0.7164453252644027, |
|
"learning_rate": 3.9971167411282835e-05, |
|
"loss": 0.4682, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.5888, |
|
"grad_norm": 0.9202895733189819, |
|
"learning_rate": 3.99607590490251e-05, |
|
"loss": 0.4737, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.6016, |
|
"grad_norm": 0.6666361003438368, |
|
"learning_rate": 3.9948751645524235e-05, |
|
"loss": 0.4692, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.6144, |
|
"grad_norm": 0.8993138978977713, |
|
"learning_rate": 3.9935146162684206e-05, |
|
"loss": 0.5134, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.6272, |
|
"grad_norm": 0.7819898848123767, |
|
"learning_rate": 3.9919943690429906e-05, |
|
"loss": 0.4615, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.7700009217327806, |
|
"learning_rate": 3.9903145446619837e-05, |
|
"loss": 0.4936, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.6528, |
|
"grad_norm": 0.7749980808284274, |
|
"learning_rate": 3.9884752776948564e-05, |
|
"loss": 0.478, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.6656, |
|
"grad_norm": 0.7193450266009187, |
|
"learning_rate": 3.9864767154838864e-05, |
|
"loss": 0.4747, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.6784, |
|
"grad_norm": 0.6600121021852756, |
|
"learning_rate": 3.9843190181323744e-05, |
|
"loss": 0.4628, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.6912, |
|
"grad_norm": 0.5917480390981291, |
|
"learning_rate": 3.982002358491817e-05, |
|
"loss": 0.5044, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.704, |
|
"grad_norm": 0.6527439856884594, |
|
"learning_rate": 3.979526922148058e-05, |
|
"loss": 0.4969, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.7168, |
|
"grad_norm": 0.6245050670150339, |
|
"learning_rate": 3.9768929074064206e-05, |
|
"loss": 0.4634, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.7296, |
|
"grad_norm": 0.5683956077196995, |
|
"learning_rate": 3.9741005252758255e-05, |
|
"loss": 0.4987, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.7424, |
|
"grad_norm": 0.6824717383950719, |
|
"learning_rate": 3.971149999451886e-05, |
|
"loss": 0.465, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.7552, |
|
"grad_norm": 0.8133420587406068, |
|
"learning_rate": 3.9680415662989806e-05, |
|
"loss": 0.4777, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.768, |
|
"grad_norm": 0.5905406037427718, |
|
"learning_rate": 3.9647754748313294e-05, |
|
"loss": 0.4701, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.7808, |
|
"grad_norm": 0.5285981231850914, |
|
"learning_rate": 3.96135198669304e-05, |
|
"loss": 0.4747, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.7936, |
|
"grad_norm": 0.6448941053978279, |
|
"learning_rate": 3.957771376137144e-05, |
|
"loss": 0.4866, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.8064, |
|
"grad_norm": 0.6070514524417691, |
|
"learning_rate": 3.954033930003634e-05, |
|
"loss": 0.4769, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.8192, |
|
"grad_norm": 0.636962417422208, |
|
"learning_rate": 3.9501399476964806e-05, |
|
"loss": 0.4619, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.832, |
|
"grad_norm": 0.6305102055156264, |
|
"learning_rate": 3.946089741159648e-05, |
|
"loss": 0.4775, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.8448, |
|
"grad_norm": 0.5996436733494288, |
|
"learning_rate": 3.9418836348521045e-05, |
|
"loss": 0.4796, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.8576, |
|
"grad_norm": 0.836070659374884, |
|
"learning_rate": 3.937521965721831e-05, |
|
"loss": 0.4589, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.8704, |
|
"grad_norm": 0.7862525734167326, |
|
"learning_rate": 3.933005083178828e-05, |
|
"loss": 0.4888, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.8832, |
|
"grad_norm": 0.66679339503122, |
|
"learning_rate": 3.928333349067125e-05, |
|
"loss": 0.4614, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.896, |
|
"grad_norm": 0.7166546763180385, |
|
"learning_rate": 3.923507137635792e-05, |
|
"loss": 0.4771, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.9088, |
|
"grad_norm": 0.9045232117210558, |
|
"learning_rate": 3.9185268355089606e-05, |
|
"loss": 0.4738, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.9216, |
|
"grad_norm": 0.5864164936190818, |
|
"learning_rate": 3.913392841654851e-05, |
|
"loss": 0.4758, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.9344, |
|
"grad_norm": 0.8376736221861006, |
|
"learning_rate": 3.9081055673538093e-05, |
|
"loss": 0.4869, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.9472, |
|
"grad_norm": 0.5930708646985897, |
|
"learning_rate": 3.902665436165364e-05, |
|
"loss": 0.4792, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.6959142572580448, |
|
"learning_rate": 3.897072883894291e-05, |
|
"loss": 0.467, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.9728, |
|
"grad_norm": 0.7695011224043343, |
|
"learning_rate": 3.8913283585557054e-05, |
|
"loss": 0.4637, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.9856, |
|
"grad_norm": 0.7781393617335021, |
|
"learning_rate": 3.885432320339167e-05, |
|
"loss": 0.4686, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.9984, |
|
"grad_norm": 0.6016159368333164, |
|
"learning_rate": 3.879385241571817e-05, |
|
"loss": 0.477, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 1.0112, |
|
"grad_norm": 0.7325602634576774, |
|
"learning_rate": 3.873187606680543e-05, |
|
"loss": 0.3618, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 1.024, |
|
"grad_norm": 0.579918445686115, |
|
"learning_rate": 3.866839912153168e-05, |
|
"loss": 0.3828, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 1.0368, |
|
"grad_norm": 0.6300933561194503, |
|
"learning_rate": 3.860342666498677e-05, |
|
"loss": 0.3577, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 1.0496, |
|
"grad_norm": 0.7166897812510176, |
|
"learning_rate": 3.853696390206484e-05, |
|
"loss": 0.4064, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 1.0624, |
|
"grad_norm": 0.6134723000268245, |
|
"learning_rate": 3.846901615704734e-05, |
|
"loss": 0.3388, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 1.0752, |
|
"grad_norm": 0.6485054609510611, |
|
"learning_rate": 3.839958887317649e-05, |
|
"loss": 0.3566, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 1.088, |
|
"grad_norm": 0.6086765543022894, |
|
"learning_rate": 3.832868761221926e-05, |
|
"loss": 0.3685, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 1.1008, |
|
"grad_norm": 0.6436282196870952, |
|
"learning_rate": 3.825631805402182e-05, |
|
"loss": 0.343, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 1.1136, |
|
"grad_norm": 0.6136703693399181, |
|
"learning_rate": 3.818248599605448e-05, |
|
"loss": 0.3627, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 1.1264, |
|
"grad_norm": 0.6597854461545477, |
|
"learning_rate": 3.810719735294731e-05, |
|
"loss": 0.4019, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 1.1392, |
|
"grad_norm": 0.6976091101182352, |
|
"learning_rate": 3.8030458156016326e-05, |
|
"loss": 0.35, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 1.152, |
|
"grad_norm": 0.6012919125740401, |
|
"learning_rate": 3.795227455278029e-05, |
|
"loss": 0.353, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 1.1648, |
|
"grad_norm": 0.7099386640811836, |
|
"learning_rate": 3.787265280646825e-05, |
|
"loss": 0.3568, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 1.1776, |
|
"grad_norm": 0.5779312698308307, |
|
"learning_rate": 3.7791599295517825e-05, |
|
"loss": 0.3473, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 1.1904, |
|
"grad_norm": 0.6701143291441497, |
|
"learning_rate": 3.7709120513064196e-05, |
|
"loss": 0.3661, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 1.2032, |
|
"grad_norm": 0.5438415019228182, |
|
"learning_rate": 3.762522306641998e-05, |
|
"loss": 0.3618, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 1.216, |
|
"grad_norm": 0.5882049949982814, |
|
"learning_rate": 3.7539913676545874e-05, |
|
"loss": 0.3942, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 1.2288000000000001, |
|
"grad_norm": 0.4701506172491297, |
|
"learning_rate": 3.745319917751229e-05, |
|
"loss": 0.3467, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 1.2416, |
|
"grad_norm": 0.6001712253990339, |
|
"learning_rate": 3.736508651595188e-05, |
|
"loss": 0.3432, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 1.2544, |
|
"grad_norm": 0.5513168302131914, |
|
"learning_rate": 3.727558275050301e-05, |
|
"loss": 0.3436, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 1.2671999999999999, |
|
"grad_norm": 0.5922895209718582, |
|
"learning_rate": 3.718469505124434e-05, |
|
"loss": 0.3437, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.6108707802859178, |
|
"learning_rate": 3.709243069912041e-05, |
|
"loss": 0.3461, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 1.2928, |
|
"grad_norm": 0.5231668478617609, |
|
"learning_rate": 3.699879708535838e-05, |
|
"loss": 0.3348, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 1.3056, |
|
"grad_norm": 0.7410731078341939, |
|
"learning_rate": 3.69038017108759e-05, |
|
"loss": 0.3477, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 1.3184, |
|
"grad_norm": 0.48288301746598766, |
|
"learning_rate": 3.680745218568026e-05, |
|
"loss": 0.353, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 1.3312, |
|
"grad_norm": 0.7512257208564571, |
|
"learning_rate": 3.6709756228258735e-05, |
|
"loss": 0.3887, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 1.3439999999999999, |
|
"grad_norm": 0.5287591533630462, |
|
"learning_rate": 3.6610721664960236e-05, |
|
"loss": 0.3551, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 1.3568, |
|
"grad_norm": 0.6514949600652048, |
|
"learning_rate": 3.65103564293684e-05, |
|
"loss": 0.3529, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 1.3696, |
|
"grad_norm": 0.5624863544283835, |
|
"learning_rate": 3.640866856166601e-05, |
|
"loss": 0.3419, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 1.3824, |
|
"grad_norm": 0.566762007230502, |
|
"learning_rate": 3.6305666207990886e-05, |
|
"loss": 0.3581, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 1.3952, |
|
"grad_norm": 0.5277592599377072, |
|
"learning_rate": 3.6201357619783336e-05, |
|
"loss": 0.343, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 1.408, |
|
"grad_norm": 0.5116061122773884, |
|
"learning_rate": 3.609575115312511e-05, |
|
"loss": 0.3614, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 1.4208, |
|
"grad_norm": 0.4594112497751582, |
|
"learning_rate": 3.598885526807003e-05, |
|
"loss": 0.3548, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 1.4336, |
|
"grad_norm": 0.4894830836405895, |
|
"learning_rate": 3.5880678527966224e-05, |
|
"loss": 0.3599, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 1.4464000000000001, |
|
"grad_norm": 0.5627245095789299, |
|
"learning_rate": 3.577122959877017e-05, |
|
"loss": 0.3465, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 1.4592, |
|
"grad_norm": 0.5580236130428585, |
|
"learning_rate": 3.566051724835245e-05, |
|
"loss": 0.365, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 1.472, |
|
"grad_norm": 0.5464546445092676, |
|
"learning_rate": 3.554855034579532e-05, |
|
"loss": 0.3733, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 1.4848, |
|
"grad_norm": 0.48289230111879294, |
|
"learning_rate": 3.5435337860682304e-05, |
|
"loss": 0.348, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 1.4976, |
|
"grad_norm": 0.48490496766157387, |
|
"learning_rate": 3.532088886237956e-05, |
|
"loss": 0.389, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 1.5104, |
|
"grad_norm": 0.5152479812331888, |
|
"learning_rate": 3.520521251930941e-05, |
|
"loss": 0.3745, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 1.5232, |
|
"grad_norm": 0.52389539970339, |
|
"learning_rate": 3.5088318098215805e-05, |
|
"loss": 0.3546, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 1.536, |
|
"grad_norm": 0.5087714025796426, |
|
"learning_rate": 3.497021496342203e-05, |
|
"loss": 0.3496, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 1.5488, |
|
"grad_norm": 0.596455971246047, |
|
"learning_rate": 3.485091257608047e-05, |
|
"loss": 0.3369, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 1.5615999999999999, |
|
"grad_norm": 0.546204923669321, |
|
"learning_rate": 3.473042049341474e-05, |
|
"loss": 0.3439, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 1.5744, |
|
"grad_norm": 0.6089651252091146, |
|
"learning_rate": 3.4608748367954064e-05, |
|
"loss": 0.3858, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 1.5872000000000002, |
|
"grad_norm": 0.5123329944742488, |
|
"learning_rate": 3.4485905946759965e-05, |
|
"loss": 0.3399, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 0.5460750230352305, |
|
"learning_rate": 3.4361903070645484e-05, |
|
"loss": 0.3433, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 1.6128, |
|
"grad_norm": 0.4842152363327416, |
|
"learning_rate": 3.423674967338681e-05, |
|
"loss": 0.3504, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 1.6256, |
|
"grad_norm": 0.4831525746417104, |
|
"learning_rate": 3.411045578092754e-05, |
|
"loss": 0.3324, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 1.6383999999999999, |
|
"grad_norm": 0.47203152665523074, |
|
"learning_rate": 3.398303151057543e-05, |
|
"loss": 0.3363, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 1.6512, |
|
"grad_norm": 0.4667572772193624, |
|
"learning_rate": 3.385448707019199e-05, |
|
"loss": 0.3557, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 1.6640000000000001, |
|
"grad_norm": 0.5313968132068019, |
|
"learning_rate": 3.372483275737468e-05, |
|
"loss": 0.3377, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.6768, |
|
"grad_norm": 0.4603288687508432, |
|
"learning_rate": 3.359407895863199e-05, |
|
"loss": 0.3596, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 1.6896, |
|
"grad_norm": 0.5704329499820529, |
|
"learning_rate": 3.34622361485514e-05, |
|
"loss": 0.3576, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 1.7024, |
|
"grad_norm": 0.49024553172019536, |
|
"learning_rate": 3.332931488896029e-05, |
|
"loss": 0.3578, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 1.7151999999999998, |
|
"grad_norm": 0.4441736094109462, |
|
"learning_rate": 3.319532582807977e-05, |
|
"loss": 0.3883, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 1.728, |
|
"grad_norm": 0.5054012189818847, |
|
"learning_rate": 3.30602796996717e-05, |
|
"loss": 0.3577, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 1.7408000000000001, |
|
"grad_norm": 0.45990759797573905, |
|
"learning_rate": 3.2924187322178865e-05, |
|
"loss": 0.3384, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 1.7536, |
|
"grad_norm": 0.5042972012753414, |
|
"learning_rate": 3.278705959785821e-05, |
|
"loss": 0.3522, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 1.7664, |
|
"grad_norm": 0.47173263555770334, |
|
"learning_rate": 3.2648907511907544e-05, |
|
"loss": 0.3588, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 1.7792, |
|
"grad_norm": 0.536218080491284, |
|
"learning_rate": 3.250974213158555e-05, |
|
"loss": 0.3353, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 1.792, |
|
"grad_norm": 0.5291824051748952, |
|
"learning_rate": 3.23695746053251e-05, |
|
"loss": 0.3352, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.8048, |
|
"grad_norm": 0.5388290949320104, |
|
"learning_rate": 3.222841616184025e-05, |
|
"loss": 0.3624, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 1.8176, |
|
"grad_norm": 0.47325434101138114, |
|
"learning_rate": 3.208627810922665e-05, |
|
"loss": 0.3497, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 1.8304, |
|
"grad_norm": 0.49540812516736116, |
|
"learning_rate": 3.194317183405573e-05, |
|
"loss": 0.3308, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 1.8432, |
|
"grad_norm": 0.5034044255655133, |
|
"learning_rate": 3.1799108800462466e-05, |
|
"loss": 0.3219, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 1.8559999999999999, |
|
"grad_norm": 0.4712459982237337, |
|
"learning_rate": 3.1654100549227024e-05, |
|
"loss": 0.3676, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 1.8688, |
|
"grad_norm": 0.43988904870746354, |
|
"learning_rate": 3.1508158696850275e-05, |
|
"loss": 0.3727, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 1.8816000000000002, |
|
"grad_norm": 0.4688533638844716, |
|
"learning_rate": 3.136129493462312e-05, |
|
"loss": 0.3596, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 1.8944, |
|
"grad_norm": 0.4395990789411024, |
|
"learning_rate": 3.121352102768998e-05, |
|
"loss": 0.3511, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.9072, |
|
"grad_norm": 0.4629286104729869, |
|
"learning_rate": 3.106484881410628e-05, |
|
"loss": 0.371, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 0.4299981843603125, |
|
"learning_rate": 3.091529020389009e-05, |
|
"loss": 0.3613, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.9327999999999999, |
|
"grad_norm": 0.4788981552771838, |
|
"learning_rate": 3.076485717806808e-05, |
|
"loss": 0.336, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 1.9456, |
|
"grad_norm": 0.43615735210231366, |
|
"learning_rate": 3.061356178771564e-05, |
|
"loss": 0.3673, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.9584000000000001, |
|
"grad_norm": 0.472358999942315, |
|
"learning_rate": 3.0461416152991555e-05, |
|
"loss": 0.3467, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 1.9712, |
|
"grad_norm": 0.4528381291272913, |
|
"learning_rate": 3.0308432462167045e-05, |
|
"loss": 0.3604, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 1.984, |
|
"grad_norm": 0.44002931894325426, |
|
"learning_rate": 3.015462297064936e-05, |
|
"loss": 0.3474, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 1.9968, |
|
"grad_norm": 0.4710557835531748, |
|
"learning_rate": 3.0000000000000004e-05, |
|
"loss": 0.3419, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 2.0096, |
|
"grad_norm": 0.5984188783781017, |
|
"learning_rate": 2.98445759369477e-05, |
|
"loss": 0.2575, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 2.0224, |
|
"grad_norm": 0.43808210985272245, |
|
"learning_rate": 2.9688363232396056e-05, |
|
"loss": 0.2603, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 2.0352, |
|
"grad_norm": 1.021104609817689, |
|
"learning_rate": 2.9531374400426158e-05, |
|
"loss": 0.2435, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 2.048, |
|
"grad_norm": 0.6229112357572324, |
|
"learning_rate": 2.9373622017294075e-05, |
|
"loss": 0.2395, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 2.0608, |
|
"grad_norm": 0.6400787003433385, |
|
"learning_rate": 2.9215118720423375e-05, |
|
"loss": 0.2427, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 2.0736, |
|
"grad_norm": 0.5219847532700883, |
|
"learning_rate": 2.9055877207392752e-05, |
|
"loss": 0.2442, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 2.0864, |
|
"grad_norm": 0.5514955488257288, |
|
"learning_rate": 2.8895910234918828e-05, |
|
"loss": 0.2294, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 2.0992, |
|
"grad_norm": 0.5429649242477618, |
|
"learning_rate": 2.873523061783426e-05, |
|
"loss": 0.2328, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 2.112, |
|
"grad_norm": 0.5566520824690419, |
|
"learning_rate": 2.8573851228061084e-05, |
|
"loss": 0.2235, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 2.1248, |
|
"grad_norm": 0.5337400616346479, |
|
"learning_rate": 2.8411784993579633e-05, |
|
"loss": 0.2305, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 2.1376, |
|
"grad_norm": 0.48911001853812824, |
|
"learning_rate": 2.8249044897392814e-05, |
|
"loss": 0.2234, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 2.1504, |
|
"grad_norm": 0.6209324495822574, |
|
"learning_rate": 2.80856439764861e-05, |
|
"loss": 0.2322, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 2.1632, |
|
"grad_norm": 0.45607628895768343, |
|
"learning_rate": 2.792159532078314e-05, |
|
"loss": 0.2351, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 2.176, |
|
"grad_norm": 0.5553683422626101, |
|
"learning_rate": 2.77569120720971e-05, |
|
"loss": 0.2289, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 2.1888, |
|
"grad_norm": 0.4504220323360159, |
|
"learning_rate": 2.7591607423077932e-05, |
|
"loss": 0.2336, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 2.2016, |
|
"grad_norm": 0.508634916908271, |
|
"learning_rate": 2.7425694616155474e-05, |
|
"loss": 0.2238, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 2.2144, |
|
"grad_norm": 0.44816802882428297, |
|
"learning_rate": 2.7259186942478656e-05, |
|
"loss": 0.2222, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 2.2272, |
|
"grad_norm": 0.42908742815813744, |
|
"learning_rate": 2.7092097740850712e-05, |
|
"loss": 0.2303, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 0.4758264147487167, |
|
"learning_rate": 2.692444039666066e-05, |
|
"loss": 0.2426, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 2.2528, |
|
"grad_norm": 0.41247654890650964, |
|
"learning_rate": 2.6756228340810946e-05, |
|
"loss": 0.2296, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 2.2656, |
|
"grad_norm": 0.41642953802943083, |
|
"learning_rate": 2.6587475048641596e-05, |
|
"loss": 0.2441, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 2.2784, |
|
"grad_norm": 0.4437874842536521, |
|
"learning_rate": 2.6418194038850634e-05, |
|
"loss": 0.2186, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 2.2912, |
|
"grad_norm": 0.41784001269201265, |
|
"learning_rate": 2.624839887241115e-05, |
|
"loss": 0.2318, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 2.304, |
|
"grad_norm": 0.46721551739616907, |
|
"learning_rate": 2.607810315148494e-05, |
|
"loss": 0.2093, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 2.3168, |
|
"grad_norm": 0.41112578910321584, |
|
"learning_rate": 2.5907320518332827e-05, |
|
"loss": 0.2441, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 2.3296, |
|
"grad_norm": 0.43979540887477736, |
|
"learning_rate": 2.5736064654221808e-05, |
|
"loss": 0.2214, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 2.3424, |
|
"grad_norm": 0.40544235255732436, |
|
"learning_rate": 2.5564349278329056e-05, |
|
"loss": 0.2108, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 2.3552, |
|
"grad_norm": 0.40184483388257736, |
|
"learning_rate": 2.539218814664288e-05, |
|
"loss": 0.2221, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 2.368, |
|
"grad_norm": 0.4308628597440365, |
|
"learning_rate": 2.521959505086075e-05, |
|
"loss": 0.2341, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 2.3808, |
|
"grad_norm": 0.39298579507534864, |
|
"learning_rate": 2.5046583817284437e-05, |
|
"loss": 0.2219, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 2.3936, |
|
"grad_norm": 0.43188493844288345, |
|
"learning_rate": 2.487316830571244e-05, |
|
"loss": 0.2424, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 2.4064, |
|
"grad_norm": 0.41130337697754366, |
|
"learning_rate": 2.4699362408329646e-05, |
|
"loss": 0.2222, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 2.4192, |
|
"grad_norm": 0.40246809462390837, |
|
"learning_rate": 2.4525180048594452e-05, |
|
"loss": 0.2199, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 2.432, |
|
"grad_norm": 0.4088112010685077, |
|
"learning_rate": 2.435063518012335e-05, |
|
"loss": 0.233, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 2.4448, |
|
"grad_norm": 0.3660759480240648, |
|
"learning_rate": 2.4175741785573177e-05, |
|
"loss": 0.2209, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 2.4576000000000002, |
|
"grad_norm": 0.3919129516116809, |
|
"learning_rate": 2.4000513875520892e-05, |
|
"loss": 0.228, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 2.4704, |
|
"grad_norm": 0.4077936034059262, |
|
"learning_rate": 2.3824965487341247e-05, |
|
"loss": 0.2304, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 2.4832, |
|
"grad_norm": 0.417985730697302, |
|
"learning_rate": 2.3649110684082258e-05, |
|
"loss": 0.2573, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 2.496, |
|
"grad_norm": 0.4227092312003486, |
|
"learning_rate": 2.3472963553338614e-05, |
|
"loss": 0.2283, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 2.5088, |
|
"grad_norm": 0.40115319247096887, |
|
"learning_rate": 2.3296538206123134e-05, |
|
"loss": 0.2333, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 2.5216, |
|
"grad_norm": 0.43510045143506826, |
|
"learning_rate": 2.311984877573636e-05, |
|
"loss": 0.2244, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 2.5343999999999998, |
|
"grad_norm": 0.3880884229030637, |
|
"learning_rate": 2.2942909416634326e-05, |
|
"loss": 0.2461, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 2.5472, |
|
"grad_norm": 0.40308061075230467, |
|
"learning_rate": 2.2765734303294666e-05, |
|
"loss": 0.2213, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 0.42756912689781795, |
|
"learning_rate": 2.2588337629081107e-05, |
|
"loss": 0.2206, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 2.5728, |
|
"grad_norm": 0.39943092230727767, |
|
"learning_rate": 2.2410733605106462e-05, |
|
"loss": 0.247, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 2.5856, |
|
"grad_norm": 0.37344591996160337, |
|
"learning_rate": 2.2232936459094158e-05, |
|
"loss": 0.2358, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 2.5984, |
|
"grad_norm": 0.40046097239336376, |
|
"learning_rate": 2.205496043423849e-05, |
|
"loss": 0.2403, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 2.6112, |
|
"grad_norm": 0.38476391536822985, |
|
"learning_rate": 2.1876819788063586e-05, |
|
"loss": 0.2212, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 2.624, |
|
"grad_norm": 0.43288279571587374, |
|
"learning_rate": 2.16985287912813e-05, |
|
"loss": 0.2368, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 2.6368, |
|
"grad_norm": 0.3700461308028724, |
|
"learning_rate": 2.1520101726647922e-05, |
|
"loss": 0.2618, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 2.6496, |
|
"grad_norm": 0.4616637161436342, |
|
"learning_rate": 2.1341552887820048e-05, |
|
"loss": 0.2174, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 2.6624, |
|
"grad_norm": 0.39035133902253477, |
|
"learning_rate": 2.1162896578209517e-05, |
|
"loss": 0.2225, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 2.6752000000000002, |
|
"grad_norm": 0.4615541847206986, |
|
"learning_rate": 2.0984147109837564e-05, |
|
"loss": 0.2451, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 2.6879999999999997, |
|
"grad_norm": 0.40812141024101817, |
|
"learning_rate": 2.0805318802188307e-05, |
|
"loss": 0.2448, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 2.7008, |
|
"grad_norm": 0.3790021279522406, |
|
"learning_rate": 2.0626425981061608e-05, |
|
"loss": 0.2268, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 2.7136, |
|
"grad_norm": 0.41068999411679585, |
|
"learning_rate": 2.0447482977425465e-05, |
|
"loss": 0.2355, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 2.7264, |
|
"grad_norm": 0.36597102281558597, |
|
"learning_rate": 2.0268504126267952e-05, |
|
"loss": 0.2389, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 2.7392, |
|
"grad_norm": 0.41906748164633945, |
|
"learning_rate": 2.008950376544887e-05, |
|
"loss": 0.2338, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 2.752, |
|
"grad_norm": 0.3847727926837576, |
|
"learning_rate": 1.9910496234551132e-05, |
|
"loss": 0.2418, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 2.7648, |
|
"grad_norm": 0.3602038255117451, |
|
"learning_rate": 1.9731495873732055e-05, |
|
"loss": 0.2533, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 2.7776, |
|
"grad_norm": 0.40242562598449944, |
|
"learning_rate": 1.9552517022574542e-05, |
|
"loss": 0.2464, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 2.7904, |
|
"grad_norm": 0.36537418924420284, |
|
"learning_rate": 1.93735740189384e-05, |
|
"loss": 0.2073, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 2.8032, |
|
"grad_norm": 0.3741508848048974, |
|
"learning_rate": 1.9194681197811703e-05, |
|
"loss": 0.2416, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 2.816, |
|
"grad_norm": 0.3625808111319984, |
|
"learning_rate": 1.901585289016244e-05, |
|
"loss": 0.2271, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 2.8288, |
|
"grad_norm": 0.3692557062739783, |
|
"learning_rate": 1.8837103421790486e-05, |
|
"loss": 0.227, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 2.8416, |
|
"grad_norm": 0.3828871524290137, |
|
"learning_rate": 1.8658447112179952e-05, |
|
"loss": 0.2197, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 2.8544, |
|
"grad_norm": 0.38151721911154457, |
|
"learning_rate": 1.8479898273352084e-05, |
|
"loss": 0.233, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 2.8672, |
|
"grad_norm": 0.3786750713015652, |
|
"learning_rate": 1.83014712087187e-05, |
|
"loss": 0.2316, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"grad_norm": 0.36767050954329955, |
|
"learning_rate": 1.8123180211936417e-05, |
|
"loss": 0.2174, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 2.8928000000000003, |
|
"grad_norm": 0.3838174394324037, |
|
"learning_rate": 1.794503956576152e-05, |
|
"loss": 0.2556, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 2.9055999999999997, |
|
"grad_norm": 0.4122104081652899, |
|
"learning_rate": 1.776706354090585e-05, |
|
"loss": 0.2607, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 2.9184, |
|
"grad_norm": 0.36751878868243526, |
|
"learning_rate": 1.758926639489354e-05, |
|
"loss": 0.2224, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 2.9312, |
|
"grad_norm": 0.34514347989382804, |
|
"learning_rate": 1.7411662370918893e-05, |
|
"loss": 0.2329, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 2.944, |
|
"grad_norm": 0.3823271811821992, |
|
"learning_rate": 1.7234265696705344e-05, |
|
"loss": 0.233, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 2.9568, |
|
"grad_norm": 0.37643499161120975, |
|
"learning_rate": 1.7057090583365678e-05, |
|
"loss": 0.2395, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 2.9696, |
|
"grad_norm": 0.3987921136655855, |
|
"learning_rate": 1.6880151224263646e-05, |
|
"loss": 0.2301, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 2.9824, |
|
"grad_norm": 0.36187712283271656, |
|
"learning_rate": 1.6703461793876876e-05, |
|
"loss": 0.2378, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 2.9952, |
|
"grad_norm": 0.36431473793509095, |
|
"learning_rate": 1.6527036446661396e-05, |
|
"loss": 0.2162, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 3.008, |
|
"grad_norm": 0.4342500045335589, |
|
"learning_rate": 1.635088931591775e-05, |
|
"loss": 0.1861, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 3.0208, |
|
"grad_norm": 0.4129767033707882, |
|
"learning_rate": 1.6175034512658753e-05, |
|
"loss": 0.1464, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 3.0336, |
|
"grad_norm": 0.3312785368446801, |
|
"learning_rate": 1.5999486124479115e-05, |
|
"loss": 0.177, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 3.0464, |
|
"grad_norm": 0.4335543103057223, |
|
"learning_rate": 1.5824258214426833e-05, |
|
"loss": 0.1635, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 3.0592, |
|
"grad_norm": 0.4606606835139822, |
|
"learning_rate": 1.5649364819876655e-05, |
|
"loss": 0.1459, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 3.072, |
|
"grad_norm": 0.40242659216003274, |
|
"learning_rate": 1.547481995140556e-05, |
|
"loss": 0.138, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 3.0848, |
|
"grad_norm": 0.3335201884903501, |
|
"learning_rate": 1.5300637591670357e-05, |
|
"loss": 0.139, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 3.0976, |
|
"grad_norm": 0.3645495863663169, |
|
"learning_rate": 1.5126831694287564e-05, |
|
"loss": 0.1445, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 3.1104, |
|
"grad_norm": 0.3824301187556719, |
|
"learning_rate": 1.4953416182715566e-05, |
|
"loss": 0.146, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 3.1232, |
|
"grad_norm": 0.3424946141123901, |
|
"learning_rate": 1.478040494913926e-05, |
|
"loss": 0.1397, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 3.136, |
|
"grad_norm": 0.33139230307805356, |
|
"learning_rate": 1.460781185335713e-05, |
|
"loss": 0.1349, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 3.1488, |
|
"grad_norm": 0.3406508841684583, |
|
"learning_rate": 1.443565072167095e-05, |
|
"loss": 0.1474, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 3.1616, |
|
"grad_norm": 0.36887333789876015, |
|
"learning_rate": 1.4263935345778202e-05, |
|
"loss": 0.1641, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 3.1744, |
|
"grad_norm": 0.34568515710581665, |
|
"learning_rate": 1.409267948166718e-05, |
|
"loss": 0.1456, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 3.1872, |
|
"grad_norm": 0.32937953601397224, |
|
"learning_rate": 1.3921896848515064e-05, |
|
"loss": 0.1493, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"grad_norm": 0.3358387850386913, |
|
"learning_rate": 1.3751601127588849e-05, |
|
"loss": 0.1372, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 3.2128, |
|
"grad_norm": 0.32369053977299683, |
|
"learning_rate": 1.3581805961149371e-05, |
|
"loss": 0.1362, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 3.2256, |
|
"grad_norm": 0.34777208110183666, |
|
"learning_rate": 1.341252495135841e-05, |
|
"loss": 0.1459, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 3.2384, |
|
"grad_norm": 0.3387384441271983, |
|
"learning_rate": 1.324377165918906e-05, |
|
"loss": 0.1297, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 3.2512, |
|
"grad_norm": 0.3305197022844186, |
|
"learning_rate": 1.3075559603339354e-05, |
|
"loss": 0.1467, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 3.2640000000000002, |
|
"grad_norm": 0.32825629222170216, |
|
"learning_rate": 1.2907902259149287e-05, |
|
"loss": 0.1422, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 3.2768, |
|
"grad_norm": 0.36275451831392513, |
|
"learning_rate": 1.274081305752135e-05, |
|
"loss": 0.1511, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 3.2896, |
|
"grad_norm": 0.34376622589938244, |
|
"learning_rate": 1.2574305383844528e-05, |
|
"loss": 0.1662, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 3.3024, |
|
"grad_norm": 0.3185289435275682, |
|
"learning_rate": 1.2408392576922075e-05, |
|
"loss": 0.1492, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 3.3152, |
|
"grad_norm": 0.3122326664709875, |
|
"learning_rate": 1.2243087927902905e-05, |
|
"loss": 0.1469, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 3.328, |
|
"grad_norm": 0.3299569491313471, |
|
"learning_rate": 1.2078404679216864e-05, |
|
"loss": 0.1467, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 3.3407999999999998, |
|
"grad_norm": 0.3080431216734959, |
|
"learning_rate": 1.1914356023513904e-05, |
|
"loss": 0.1548, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 3.3536, |
|
"grad_norm": 0.3078596243161126, |
|
"learning_rate": 1.1750955102607193e-05, |
|
"loss": 0.136, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 3.3664, |
|
"grad_norm": 0.3192246593041759, |
|
"learning_rate": 1.1588215006420374e-05, |
|
"loss": 0.1465, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 3.3792, |
|
"grad_norm": 0.3368616103541651, |
|
"learning_rate": 1.1426148771938915e-05, |
|
"loss": 0.1358, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 3.392, |
|
"grad_norm": 0.3026632461475973, |
|
"learning_rate": 1.1264769382165748e-05, |
|
"loss": 0.1592, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 3.4048, |
|
"grad_norm": 0.3106375759183477, |
|
"learning_rate": 1.110408976508118e-05, |
|
"loss": 0.1301, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 3.4176, |
|
"grad_norm": 0.31276027570498793, |
|
"learning_rate": 1.094412279260726e-05, |
|
"loss": 0.1226, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 3.4304, |
|
"grad_norm": 0.2962955540289862, |
|
"learning_rate": 1.0784881279576635e-05, |
|
"loss": 0.1577, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 3.4432, |
|
"grad_norm": 0.3057229986287228, |
|
"learning_rate": 1.0626377982705929e-05, |
|
"loss": 0.1465, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 3.456, |
|
"grad_norm": 0.31143010677072214, |
|
"learning_rate": 1.0468625599573842e-05, |
|
"loss": 0.1415, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 3.4688, |
|
"grad_norm": 0.3007146315051674, |
|
"learning_rate": 1.0311636767603952e-05, |
|
"loss": 0.1556, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 3.4816, |
|
"grad_norm": 0.3028862641231386, |
|
"learning_rate": 1.0155424063052306e-05, |
|
"loss": 0.1379, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 3.4944, |
|
"grad_norm": 0.3012569121094226, |
|
"learning_rate": 1.0000000000000006e-05, |
|
"loss": 0.1526, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 3.5072, |
|
"grad_norm": 0.29537699389840416, |
|
"learning_rate": 9.84537702935065e-06, |
|
"loss": 0.1452, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"grad_norm": 0.29288395833603187, |
|
"learning_rate": 9.691567537832964e-06, |
|
"loss": 0.144, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 3.5328, |
|
"grad_norm": 0.2980050111798448, |
|
"learning_rate": 9.538583847008452e-06, |
|
"loss": 0.1493, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 3.5456, |
|
"grad_norm": 0.3072752930176877, |
|
"learning_rate": 9.386438212284372e-06, |
|
"loss": 0.1796, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 3.5584, |
|
"grad_norm": 0.29972555622874003, |
|
"learning_rate": 9.235142821931928e-06, |
|
"loss": 0.1596, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 3.5712, |
|
"grad_norm": 0.3002806996999677, |
|
"learning_rate": 9.084709796109907e-06, |
|
"loss": 0.1453, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 3.584, |
|
"grad_norm": 0.31498127367500756, |
|
"learning_rate": 8.93515118589373e-06, |
|
"loss": 0.1546, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 3.5968, |
|
"grad_norm": 0.29673264593160975, |
|
"learning_rate": 8.786478972310023e-06, |
|
"loss": 0.1437, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 3.6096, |
|
"grad_norm": 0.298778931606412, |
|
"learning_rate": 8.638705065376887e-06, |
|
"loss": 0.1213, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 3.6224, |
|
"grad_norm": 0.30678453495493, |
|
"learning_rate": 8.491841303149728e-06, |
|
"loss": 0.1519, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 3.6352, |
|
"grad_norm": 0.2959759682993311, |
|
"learning_rate": 8.345899450772975e-06, |
|
"loss": 0.1505, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 3.648, |
|
"grad_norm": 0.29411661056927013, |
|
"learning_rate": 8.200891199537549e-06, |
|
"loss": 0.1362, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 3.6608, |
|
"grad_norm": 0.2892155768652224, |
|
"learning_rate": 8.056828165944282e-06, |
|
"loss": 0.1539, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 3.6736, |
|
"grad_norm": 0.2795468650885671, |
|
"learning_rate": 7.913721890773354e-06, |
|
"loss": 0.1253, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 3.6864, |
|
"grad_norm": 0.3785102022769812, |
|
"learning_rate": 7.771583838159756e-06, |
|
"loss": 0.1387, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 3.6992000000000003, |
|
"grad_norm": 0.3002694668553819, |
|
"learning_rate": 7.630425394674903e-06, |
|
"loss": 0.1397, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 3.7119999999999997, |
|
"grad_norm": 0.29436747743610725, |
|
"learning_rate": 7.49025786841445e-06, |
|
"loss": 0.1423, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 3.7248, |
|
"grad_norm": 0.2952396977031661, |
|
"learning_rate": 7.3510924880924575e-06, |
|
"loss": 0.1318, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 3.7376, |
|
"grad_norm": 0.30243034458148, |
|
"learning_rate": 7.212940402141808e-06, |
|
"loss": 0.1579, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 3.7504, |
|
"grad_norm": 0.2959833802980389, |
|
"learning_rate": 7.075812677821145e-06, |
|
"loss": 0.1395, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 3.7632, |
|
"grad_norm": 0.29261738770695445, |
|
"learning_rate": 6.939720300328303e-06, |
|
"loss": 0.1459, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 3.776, |
|
"grad_norm": 0.27437485695959046, |
|
"learning_rate": 6.8046741719202385e-06, |
|
"loss": 0.156, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 3.7888, |
|
"grad_norm": 0.2852412068436951, |
|
"learning_rate": 6.67068511103971e-06, |
|
"loss": 0.1222, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 3.8016, |
|
"grad_norm": 0.27664571131499804, |
|
"learning_rate": 6.537763851448593e-06, |
|
"loss": 0.142, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 3.8144, |
|
"grad_norm": 0.29312076194262626, |
|
"learning_rate": 6.4059210413680175e-06, |
|
"loss": 0.1388, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 3.8272, |
|
"grad_norm": 0.29420685159823096, |
|
"learning_rate": 6.275167242625331e-06, |
|
"loss": 0.1356, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"grad_norm": 0.283690838251792, |
|
"learning_rate": 6.145512929808013e-06, |
|
"loss": 0.1529, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 3.8528000000000002, |
|
"grad_norm": 0.28242081034865174, |
|
"learning_rate": 6.016968489424572e-06, |
|
"loss": 0.1399, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 3.8656, |
|
"grad_norm": 0.287923553335738, |
|
"learning_rate": 5.889544219072465e-06, |
|
"loss": 0.1455, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 3.8784, |
|
"grad_norm": 0.2860752321041213, |
|
"learning_rate": 5.7632503266131925e-06, |
|
"loss": 0.1288, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 3.8912, |
|
"grad_norm": 0.28249363240206893, |
|
"learning_rate": 5.638096929354522e-06, |
|
"loss": 0.1313, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 3.904, |
|
"grad_norm": 0.27630088292909294, |
|
"learning_rate": 5.514094053240035e-06, |
|
"loss": 0.1427, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 3.9168, |
|
"grad_norm": 0.29211186438565667, |
|
"learning_rate": 5.39125163204594e-06, |
|
"loss": 0.139, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 3.9295999999999998, |
|
"grad_norm": 0.29642167879499903, |
|
"learning_rate": 5.269579506585259e-06, |
|
"loss": 0.134, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 3.9424, |
|
"grad_norm": 0.28616488618453645, |
|
"learning_rate": 5.149087423919541e-06, |
|
"loss": 0.1512, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 3.9552, |
|
"grad_norm": 0.286134692196085, |
|
"learning_rate": 5.029785036577976e-06, |
|
"loss": 0.1485, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 3.968, |
|
"grad_norm": 0.28160417320585635, |
|
"learning_rate": 4.911681901784198e-06, |
|
"loss": 0.1252, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 3.9808, |
|
"grad_norm": 0.2732320650248573, |
|
"learning_rate": 4.794787480690597e-06, |
|
"loss": 0.1379, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 3.9936, |
|
"grad_norm": 0.2890348891935754, |
|
"learning_rate": 4.679111137620442e-06, |
|
"loss": 0.1382, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 4.0064, |
|
"grad_norm": 0.2658873774571514, |
|
"learning_rate": 4.5646621393177e-06, |
|
"loss": 0.12, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 4.0192, |
|
"grad_norm": 0.28829467706893275, |
|
"learning_rate": 4.451449654204685e-06, |
|
"loss": 0.1113, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 4.032, |
|
"grad_norm": 0.2926462624152583, |
|
"learning_rate": 4.339482751647557e-06, |
|
"loss": 0.109, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 4.0448, |
|
"grad_norm": 0.23217579454357423, |
|
"learning_rate": 4.228770401229824e-06, |
|
"loss": 0.0929, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 4.0576, |
|
"grad_norm": 0.22632856588643657, |
|
"learning_rate": 4.119321472033779e-06, |
|
"loss": 0.0886, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 4.0704, |
|
"grad_norm": 0.2697818741789059, |
|
"learning_rate": 4.011144731929981e-06, |
|
"loss": 0.12, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 4.0832, |
|
"grad_norm": 0.2958760189814652, |
|
"learning_rate": 3.904248846874894e-06, |
|
"loss": 0.1027, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 4.096, |
|
"grad_norm": 0.31239726698428777, |
|
"learning_rate": 3.7986423802166705e-06, |
|
"loss": 0.1095, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 4.1088, |
|
"grad_norm": 0.30945783794605863, |
|
"learning_rate": 3.694333792009115e-06, |
|
"loss": 0.112, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 4.1216, |
|
"grad_norm": 0.27042311557861265, |
|
"learning_rate": 3.5913314383339937e-06, |
|
"loss": 0.1211, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 4.1344, |
|
"grad_norm": 0.250810164241747, |
|
"learning_rate": 3.4896435706316e-06, |
|
"loss": 0.094, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 4.1472, |
|
"grad_norm": 0.22766150547994146, |
|
"learning_rate": 3.3892783350397675e-06, |
|
"loss": 0.0977, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"grad_norm": 0.25442781418258187, |
|
"learning_rate": 3.290243771741275e-06, |
|
"loss": 0.0886, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 4.1728, |
|
"grad_norm": 0.24128634253047432, |
|
"learning_rate": 3.1925478143197418e-06, |
|
"loss": 0.111, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 4.1856, |
|
"grad_norm": 0.22809175946788002, |
|
"learning_rate": 3.0961982891241083e-06, |
|
"loss": 0.1091, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 4.1984, |
|
"grad_norm": 0.22838394593196218, |
|
"learning_rate": 3.001202914641628e-06, |
|
"loss": 0.131, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 4.2112, |
|
"grad_norm": 0.24123953281829802, |
|
"learning_rate": 2.907569300879596e-06, |
|
"loss": 0.1065, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 4.224, |
|
"grad_norm": 0.2268091319969786, |
|
"learning_rate": 2.815304948755664e-06, |
|
"loss": 0.1019, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 4.2368, |
|
"grad_norm": 0.22335799894308225, |
|
"learning_rate": 2.7244172494969978e-06, |
|
"loss": 0.0993, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 4.2496, |
|
"grad_norm": 0.21715114999813875, |
|
"learning_rate": 2.6349134840481294e-06, |
|
"loss": 0.1006, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 4.2624, |
|
"grad_norm": 0.2104223361612852, |
|
"learning_rate": 2.546800822487714e-06, |
|
"loss": 0.1009, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 4.2752, |
|
"grad_norm": 0.2106637576403025, |
|
"learning_rate": 2.4600863234541338e-06, |
|
"loss": 0.1022, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 4.288, |
|
"grad_norm": 0.2263490487535301, |
|
"learning_rate": 2.374776933580025e-06, |
|
"loss": 0.0964, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 4.3008, |
|
"grad_norm": 0.21501359524388158, |
|
"learning_rate": 2.2908794869358044e-06, |
|
"loss": 0.1009, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 4.3136, |
|
"grad_norm": 0.22569777910551836, |
|
"learning_rate": 2.2084007044821764e-06, |
|
"loss": 0.0807, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 4.3264, |
|
"grad_norm": 0.2077323346879666, |
|
"learning_rate": 2.127347193531757e-06, |
|
"loss": 0.0933, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 4.3392, |
|
"grad_norm": 0.21045077059465855, |
|
"learning_rate": 2.0477254472197237e-06, |
|
"loss": 0.1056, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 4.352, |
|
"grad_norm": 0.23590019433974949, |
|
"learning_rate": 1.96954184398368e-06, |
|
"loss": 0.1014, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 4.3648, |
|
"grad_norm": 0.22529326295060567, |
|
"learning_rate": 1.8928026470526917e-06, |
|
"loss": 0.1053, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 4.3776, |
|
"grad_norm": 0.21547965472381023, |
|
"learning_rate": 1.817514003945524e-06, |
|
"loss": 0.1063, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 4.3904, |
|
"grad_norm": 0.22313890152734314, |
|
"learning_rate": 1.743681945978184e-06, |
|
"loss": 0.0981, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 4.4032, |
|
"grad_norm": 0.2272155930696546, |
|
"learning_rate": 1.6713123877807413e-06, |
|
"loss": 0.1042, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 4.416, |
|
"grad_norm": 0.22055667722703767, |
|
"learning_rate": 1.6004111268235156e-06, |
|
"loss": 0.0959, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 4.4288, |
|
"grad_norm": 0.22833080435593262, |
|
"learning_rate": 1.5309838429526714e-06, |
|
"loss": 0.1113, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 4.4416, |
|
"grad_norm": 0.2125777220603205, |
|
"learning_rate": 1.4630360979351644e-06, |
|
"loss": 0.0899, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 4.4544, |
|
"grad_norm": 0.22182880401841537, |
|
"learning_rate": 1.396573335013236e-06, |
|
"loss": 0.0991, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 4.4672, |
|
"grad_norm": 0.21638022460250375, |
|
"learning_rate": 1.3316008784683265e-06, |
|
"loss": 0.0874, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"grad_norm": 0.2210865190100104, |
|
"learning_rate": 1.2681239331945695e-06, |
|
"loss": 0.102, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 4.4928, |
|
"grad_norm": 0.21375159066828703, |
|
"learning_rate": 1.2061475842818337e-06, |
|
"loss": 0.1007, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 4.5056, |
|
"grad_norm": 0.21618809403988168, |
|
"learning_rate": 1.1456767966083393e-06, |
|
"loss": 0.0894, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 4.5184, |
|
"grad_norm": 0.21470287491838058, |
|
"learning_rate": 1.086716414442952e-06, |
|
"loss": 0.0838, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 4.5312, |
|
"grad_norm": 0.21983713612706263, |
|
"learning_rate": 1.0292711610570904e-06, |
|
"loss": 0.105, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 4.5440000000000005, |
|
"grad_norm": 0.2215884766371513, |
|
"learning_rate": 9.733456383463658e-07, |
|
"loss": 0.1135, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 4.5568, |
|
"grad_norm": 0.21053572943937773, |
|
"learning_rate": 9.189443264619102e-07, |
|
"loss": 0.1011, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 4.5696, |
|
"grad_norm": 0.2338281134822983, |
|
"learning_rate": 8.660715834514977e-07, |
|
"loss": 0.0995, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 4.5824, |
|
"grad_norm": 0.20823111592940963, |
|
"learning_rate": 8.147316449103959e-07, |
|
"loss": 0.1244, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 4.5952, |
|
"grad_norm": 0.2061050909395005, |
|
"learning_rate": 7.649286236420806e-07, |
|
"loss": 0.0858, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 4.608, |
|
"grad_norm": 0.21148070237488217, |
|
"learning_rate": 7.166665093287539e-07, |
|
"loss": 0.1056, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 4.6208, |
|
"grad_norm": 0.21288461368812162, |
|
"learning_rate": 6.69949168211721e-07, |
|
"loss": 0.1011, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 4.6336, |
|
"grad_norm": 0.2057873955256772, |
|
"learning_rate": 6.247803427816945e-07, |
|
"loss": 0.106, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 4.6464, |
|
"grad_norm": 0.21635344730518735, |
|
"learning_rate": 5.811636514789598e-07, |
|
"loss": 0.0949, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 4.6592, |
|
"grad_norm": 0.2143613382155315, |
|
"learning_rate": 5.391025884035239e-07, |
|
"loss": 0.0927, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 4.672, |
|
"grad_norm": 0.20228998947621846, |
|
"learning_rate": 4.986005230351954e-07, |
|
"loss": 0.1128, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 4.6848, |
|
"grad_norm": 0.20482983920733092, |
|
"learning_rate": 4.5966069996365993e-07, |
|
"loss": 0.0936, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 4.6975999999999996, |
|
"grad_norm": 0.21953218401893168, |
|
"learning_rate": 4.22286238628562e-07, |
|
"loss": 0.0891, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 4.7104, |
|
"grad_norm": 0.20244241996307016, |
|
"learning_rate": 3.8648013306960664e-07, |
|
"loss": 0.0932, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 4.7232, |
|
"grad_norm": 0.21391720405666895, |
|
"learning_rate": 3.522452516867048e-07, |
|
"loss": 0.1021, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 4.736, |
|
"grad_norm": 0.2182333422260704, |
|
"learning_rate": 3.1958433701019697e-07, |
|
"loss": 0.1177, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 4.7488, |
|
"grad_norm": 0.2084304991983031, |
|
"learning_rate": 2.8850000548115155e-07, |
|
"loss": 0.0955, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 4.7616, |
|
"grad_norm": 0.20795283461277356, |
|
"learning_rate": 2.5899474724174313e-07, |
|
"loss": 0.0936, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 4.7744, |
|
"grad_norm": 0.21861030107083146, |
|
"learning_rate": 2.3107092593579905e-07, |
|
"loss": 0.1057, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 4.7872, |
|
"grad_norm": 0.21161038554212513, |
|
"learning_rate": 2.0473077851942858e-07, |
|
"loss": 0.1217, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"grad_norm": 0.20071896496622085, |
|
"learning_rate": 1.799764150818306e-07, |
|
"loss": 0.105, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 4.8128, |
|
"grad_norm": 0.22336813996118293, |
|
"learning_rate": 1.5680981867625566e-07, |
|
"loss": 0.0869, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 4.8256, |
|
"grad_norm": 0.20626212361882176, |
|
"learning_rate": 1.3523284516113955e-07, |
|
"loss": 0.1049, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 4.8384, |
|
"grad_norm": 0.20765922740991494, |
|
"learning_rate": 1.1524722305144231e-07, |
|
"loss": 0.1028, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 4.8512, |
|
"grad_norm": 0.22275529422271872, |
|
"learning_rate": 9.685455338016347e-08, |
|
"loss": 0.1009, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 4.864, |
|
"grad_norm": 0.22135908885017214, |
|
"learning_rate": 8.005630957010014e-08, |
|
"loss": 0.098, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 4.8768, |
|
"grad_norm": 0.20830230664411545, |
|
"learning_rate": 6.485383731580142e-08, |
|
"loss": 0.1017, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 4.8896, |
|
"grad_norm": 0.20226748336287717, |
|
"learning_rate": 5.1248354475768034e-08, |
|
"loss": 0.0889, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 4.9024, |
|
"grad_norm": 0.19992620933341473, |
|
"learning_rate": 3.924095097489922e-08, |
|
"loss": 0.1047, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 4.9152000000000005, |
|
"grad_norm": 0.20269374334136603, |
|
"learning_rate": 2.8832588717164766e-08, |
|
"loss": 0.1073, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 4.928, |
|
"grad_norm": 0.2081832474041765, |
|
"learning_rate": 2.0024101508555604e-08, |
|
"loss": 0.0903, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 4.9408, |
|
"grad_norm": 0.19899878263209597, |
|
"learning_rate": 1.281619499029274e-08, |
|
"loss": 0.0797, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 4.9536, |
|
"grad_norm": 0.21031388981769522, |
|
"learning_rate": 7.209446582292501e-09, |
|
"loss": 0.0867, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 4.9664, |
|
"grad_norm": 0.20703163671053085, |
|
"learning_rate": 3.2043054369057523e-09, |
|
"loss": 0.1016, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 4.9792, |
|
"grad_norm": 0.2062387617505902, |
|
"learning_rate": 8.010924029533406e-10, |
|
"loss": 0.0865, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 4.992, |
|
"grad_norm": 0.2122944755587596, |
|
"learning_rate": 0.0, |
|
"loss": 0.1011, |
|
"step": 390 |
|
},
{
"epoch": 4.992,
"step": 390,
"total_flos": 5.358630760129495e+17,
"train_loss": 0.2751285242537657,
"train_runtime": 10768.1535,
"train_samples_per_second": 4.643,
"train_steps_per_second": 0.036
}
],
"logging_steps": 1,
"max_steps": 390,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 5.358630760129495e+17,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}