{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 4.992, |
|
"eval_steps": 500, |
|
"global_step": 390, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0128, |
|
"grad_norm": 6.3857031845988805, |
|
"learning_rate": 1.0256410256410257e-06, |
|
"loss": 1.0146, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0256, |
|
"grad_norm": 6.513457738231102, |
|
"learning_rate": 2.0512820512820513e-06, |
|
"loss": 1.0407, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0384, |
|
"grad_norm": 6.389369904722248, |
|
"learning_rate": 3.0769230769230774e-06, |
|
"loss": 1.0401, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0512, |
|
"grad_norm": 5.657954631643649, |
|
"learning_rate": 4.102564102564103e-06, |
|
"loss": 1.0027, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.064, |
|
"grad_norm": 4.2492750171036775, |
|
"learning_rate": 5.128205128205128e-06, |
|
"loss": 0.9808, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0768, |
|
"grad_norm": 2.787538797244276, |
|
"learning_rate": 6.153846153846155e-06, |
|
"loss": 0.9533, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.0896, |
|
"grad_norm": 2.545431313650447, |
|
"learning_rate": 7.17948717948718e-06, |
|
"loss": 0.9248, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.1024, |
|
"grad_norm": 4.368850015054219, |
|
"learning_rate": 8.205128205128205e-06, |
|
"loss": 0.9499, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.1152, |
|
"grad_norm": 4.286250499212212, |
|
"learning_rate": 9.230769230769232e-06, |
|
"loss": 0.9464, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.128, |
|
"grad_norm": 4.095203710101558, |
|
"learning_rate": 1.0256410256410256e-05, |
|
"loss": 0.9051, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.1408, |
|
"grad_norm": 3.718972852702716, |
|
"learning_rate": 1.1282051282051283e-05, |
|
"loss": 0.8952, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.1536, |
|
"grad_norm": 2.5516000233573277, |
|
"learning_rate": 1.230769230769231e-05, |
|
"loss": 0.8712, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.1664, |
|
"grad_norm": 1.643547893866034, |
|
"learning_rate": 1.3333333333333333e-05, |
|
"loss": 0.8577, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.1792, |
|
"grad_norm": 1.6053997700362166, |
|
"learning_rate": 1.435897435897436e-05, |
|
"loss": 0.8136, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.192, |
|
"grad_norm": 1.3344448240126532, |
|
"learning_rate": 1.5384615384615387e-05, |
|
"loss": 0.7975, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.2048, |
|
"grad_norm": 1.0761544947032076, |
|
"learning_rate": 1.641025641025641e-05, |
|
"loss": 0.7872, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.2176, |
|
"grad_norm": 1.1636146263275826, |
|
"learning_rate": 1.7435897435897438e-05, |
|
"loss": 0.8055, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.2304, |
|
"grad_norm": 1.0969147745284513, |
|
"learning_rate": 1.8461538461538465e-05, |
|
"loss": 0.7756, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.2432, |
|
"grad_norm": 0.9698538367910754, |
|
"learning_rate": 1.9487179487179488e-05, |
|
"loss": 0.7499, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.256, |
|
"grad_norm": 1.0666601130971711, |
|
"learning_rate": 2.0512820512820512e-05, |
|
"loss": 0.7432, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.2688, |
|
"grad_norm": 0.9401468890973036, |
|
"learning_rate": 2.153846153846154e-05, |
|
"loss": 0.7327, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.2816, |
|
"grad_norm": 0.8000355819072862, |
|
"learning_rate": 2.2564102564102566e-05, |
|
"loss": 0.7404, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.2944, |
|
"grad_norm": 0.9438590359860037, |
|
"learning_rate": 2.3589743589743593e-05, |
|
"loss": 0.7164, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.3072, |
|
"grad_norm": 0.8533247867027401, |
|
"learning_rate": 2.461538461538462e-05, |
|
"loss": 0.7383, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.999532561766686, |
|
"learning_rate": 2.5641025641025646e-05, |
|
"loss": 0.7248, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.3328, |
|
"grad_norm": 0.8006836702962915, |
|
"learning_rate": 2.6666666666666667e-05, |
|
"loss": 0.7351, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.3456, |
|
"grad_norm": 0.6805979308241464, |
|
"learning_rate": 2.7692307692307694e-05, |
|
"loss": 0.7205, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.3584, |
|
"grad_norm": 0.998808499998666, |
|
"learning_rate": 2.871794871794872e-05, |
|
"loss": 0.7096, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.3712, |
|
"grad_norm": 0.9809835269133407, |
|
"learning_rate": 2.9743589743589747e-05, |
|
"loss": 0.7113, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.384, |
|
"grad_norm": 1.345020185848282, |
|
"learning_rate": 3.0769230769230774e-05, |
|
"loss": 0.6985, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.3968, |
|
"grad_norm": 0.8199700627632992, |
|
"learning_rate": 3.1794871794871795e-05, |
|
"loss": 0.7008, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.4096, |
|
"grad_norm": 0.9280100911930844, |
|
"learning_rate": 3.282051282051282e-05, |
|
"loss": 0.6964, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.4224, |
|
"grad_norm": 1.1968103155254415, |
|
"learning_rate": 3.384615384615385e-05, |
|
"loss": 0.7062, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.4352, |
|
"grad_norm": 1.2904515116682622, |
|
"learning_rate": 3.4871794871794875e-05, |
|
"loss": 0.6876, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.448, |
|
"grad_norm": 0.7405587717186609, |
|
"learning_rate": 3.58974358974359e-05, |
|
"loss": 0.6942, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.4608, |
|
"grad_norm": 1.2333725656317318, |
|
"learning_rate": 3.692307692307693e-05, |
|
"loss": 0.6816, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.4736, |
|
"grad_norm": 0.9199921652580408, |
|
"learning_rate": 3.794871794871795e-05, |
|
"loss": 0.6851, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.4864, |
|
"grad_norm": 1.1266206767787048, |
|
"learning_rate": 3.8974358974358976e-05, |
|
"loss": 0.6834, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.4992, |
|
"grad_norm": 1.0983329089323715, |
|
"learning_rate": 4e-05, |
|
"loss": 0.6849, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.512, |
|
"grad_norm": 1.2933039671370061, |
|
"learning_rate": 3.9999198907597046e-05, |
|
"loss": 0.6792, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.5248, |
|
"grad_norm": 1.1044414827820608, |
|
"learning_rate": 3.9996795694563096e-05, |
|
"loss": 0.6815, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.5376, |
|
"grad_norm": 1.6229197518771812, |
|
"learning_rate": 3.999279055341771e-05, |
|
"loss": 0.6812, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.5504, |
|
"grad_norm": 1.0537460648246924, |
|
"learning_rate": 3.998718380500971e-05, |
|
"loss": 0.6669, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.5632, |
|
"grad_norm": 1.6669868894341158, |
|
"learning_rate": 3.997997589849145e-05, |
|
"loss": 0.6824, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.576, |
|
"grad_norm": 1.062285226779462, |
|
"learning_rate": 3.9971167411282835e-05, |
|
"loss": 0.6691, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.5888, |
|
"grad_norm": 2.315993547681366, |
|
"learning_rate": 3.99607590490251e-05, |
|
"loss": 0.6761, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.6016, |
|
"grad_norm": 2.0499112390059926, |
|
"learning_rate": 3.9948751645524235e-05, |
|
"loss": 0.6644, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.6144, |
|
"grad_norm": 1.2800447567817648, |
|
"learning_rate": 3.9935146162684206e-05, |
|
"loss": 0.6809, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.6272, |
|
"grad_norm": 1.4208100924571356, |
|
"learning_rate": 3.9919943690429906e-05, |
|
"loss": 0.6789, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 1.2828897919868043, |
|
"learning_rate": 3.9903145446619837e-05, |
|
"loss": 0.6727, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.6528, |
|
"grad_norm": 1.1378068747326247, |
|
"learning_rate": 3.9884752776948564e-05, |
|
"loss": 0.6666, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.6656, |
|
"grad_norm": 1.1703720670666418, |
|
"learning_rate": 3.9864767154838864e-05, |
|
"loss": 0.6535, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.6784, |
|
"grad_norm": 0.8378124740862146, |
|
"learning_rate": 3.9843190181323744e-05, |
|
"loss": 0.6743, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.6912, |
|
"grad_norm": 1.1803403654880005, |
|
"learning_rate": 3.982002358491817e-05, |
|
"loss": 0.6607, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.704, |
|
"grad_norm": 0.7014239373149845, |
|
"learning_rate": 3.979526922148058e-05, |
|
"loss": 0.6495, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.7168, |
|
"grad_norm": 0.9599898972216756, |
|
"learning_rate": 3.9768929074064206e-05, |
|
"loss": 0.6731, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.7296, |
|
"grad_norm": 1.015973863547262, |
|
"learning_rate": 3.9741005252758255e-05, |
|
"loss": 0.6732, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.7424, |
|
"grad_norm": 1.3645890344778389, |
|
"learning_rate": 3.971149999451886e-05, |
|
"loss": 0.6505, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.7552, |
|
"grad_norm": 0.7396969296398802, |
|
"learning_rate": 3.9680415662989806e-05, |
|
"loss": 0.6515, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.768, |
|
"grad_norm": 0.7457766555713855, |
|
"learning_rate": 3.9647754748313294e-05, |
|
"loss": 0.6551, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.7808, |
|
"grad_norm": 1.0213173629581123, |
|
"learning_rate": 3.96135198669304e-05, |
|
"loss": 0.6578, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.7936, |
|
"grad_norm": 1.420903141785154, |
|
"learning_rate": 3.957771376137144e-05, |
|
"loss": 0.6535, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.8064, |
|
"grad_norm": 0.6787691231974609, |
|
"learning_rate": 3.954033930003634e-05, |
|
"loss": 0.642, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.8192, |
|
"grad_norm": 1.3907521323681888, |
|
"learning_rate": 3.9501399476964806e-05, |
|
"loss": 0.6522, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.832, |
|
"grad_norm": 1.1489305276103492, |
|
"learning_rate": 3.946089741159648e-05, |
|
"loss": 0.6471, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.8448, |
|
"grad_norm": 0.82984156577414, |
|
"learning_rate": 3.9418836348521045e-05, |
|
"loss": 0.6577, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.8576, |
|
"grad_norm": 1.558153671441526, |
|
"learning_rate": 3.937521965721831e-05, |
|
"loss": 0.6491, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.8704, |
|
"grad_norm": 0.701248806012898, |
|
"learning_rate": 3.933005083178828e-05, |
|
"loss": 0.6478, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.8832, |
|
"grad_norm": 1.541542547111201, |
|
"learning_rate": 3.928333349067125e-05, |
|
"loss": 0.6482, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.896, |
|
"grad_norm": 0.790406383186093, |
|
"learning_rate": 3.923507137635792e-05, |
|
"loss": 0.6322, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.9088, |
|
"grad_norm": 1.0598252879306564, |
|
"learning_rate": 3.9185268355089606e-05, |
|
"loss": 0.6645, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.9216, |
|
"grad_norm": 1.1772692501202318, |
|
"learning_rate": 3.913392841654851e-05, |
|
"loss": 0.6407, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.9344, |
|
"grad_norm": 0.5997636093563055, |
|
"learning_rate": 3.9081055673538093e-05, |
|
"loss": 0.6454, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.9472, |
|
"grad_norm": 1.1473486204015397, |
|
"learning_rate": 3.902665436165364e-05, |
|
"loss": 0.6412, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.8986351955161239, |
|
"learning_rate": 3.897072883894291e-05, |
|
"loss": 0.6457, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.9728, |
|
"grad_norm": 0.7846398455496357, |
|
"learning_rate": 3.8913283585557054e-05, |
|
"loss": 0.6372, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.9856, |
|
"grad_norm": 1.1094087249281304, |
|
"learning_rate": 3.885432320339167e-05, |
|
"loss": 0.6432, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.9984, |
|
"grad_norm": 0.6848404586025102, |
|
"learning_rate": 3.879385241571817e-05, |
|
"loss": 0.6306, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 1.0112, |
|
"grad_norm": 0.9964224498739199, |
|
"learning_rate": 3.873187606680543e-05, |
|
"loss": 0.5884, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 1.024, |
|
"grad_norm": 0.9102689953399805, |
|
"learning_rate": 3.866839912153168e-05, |
|
"loss": 0.5942, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 1.0368, |
|
"grad_norm": 1.1178515136602816, |
|
"learning_rate": 3.860342666498677e-05, |
|
"loss": 0.5925, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 1.0496, |
|
"grad_norm": 1.1353848859438942, |
|
"learning_rate": 3.853696390206484e-05, |
|
"loss": 0.5844, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 1.0624, |
|
"grad_norm": 0.7692431316197359, |
|
"learning_rate": 3.846901615704734e-05, |
|
"loss": 0.6025, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 1.0752, |
|
"grad_norm": 1.0299639532071048, |
|
"learning_rate": 3.839958887317649e-05, |
|
"loss": 0.5934, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 1.088, |
|
"grad_norm": 1.096779927244621, |
|
"learning_rate": 3.832868761221926e-05, |
|
"loss": 0.5838, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 1.1008, |
|
"grad_norm": 0.5928090493327227, |
|
"learning_rate": 3.825631805402182e-05, |
|
"loss": 0.5901, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 1.1136, |
|
"grad_norm": 0.782379693650114, |
|
"learning_rate": 3.818248599605448e-05, |
|
"loss": 0.5894, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 1.1264, |
|
"grad_norm": 0.8060013915829145, |
|
"learning_rate": 3.810719735294731e-05, |
|
"loss": 0.5808, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 1.1392, |
|
"grad_norm": 0.7145181062083139, |
|
"learning_rate": 3.8030458156016326e-05, |
|
"loss": 0.5775, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 1.152, |
|
"grad_norm": 0.6962399353140069, |
|
"learning_rate": 3.795227455278029e-05, |
|
"loss": 0.5834, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 1.1648, |
|
"grad_norm": 0.7395804826488809, |
|
"learning_rate": 3.787265280646825e-05, |
|
"loss": 0.5827, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 1.1776, |
|
"grad_norm": 0.5621723281500479, |
|
"learning_rate": 3.7791599295517825e-05, |
|
"loss": 0.5791, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 1.1904, |
|
"grad_norm": 0.5318029984699583, |
|
"learning_rate": 3.7709120513064196e-05, |
|
"loss": 0.5943, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 1.2032, |
|
"grad_norm": 0.5733431964660091, |
|
"learning_rate": 3.762522306641998e-05, |
|
"loss": 0.5885, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 1.216, |
|
"grad_norm": 0.6960371666052328, |
|
"learning_rate": 3.7539913676545874e-05, |
|
"loss": 0.5955, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 1.2288000000000001, |
|
"grad_norm": 0.9359896638510405, |
|
"learning_rate": 3.745319917751229e-05, |
|
"loss": 0.5842, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 1.2416, |
|
"grad_norm": 0.8480765879835703, |
|
"learning_rate": 3.736508651595188e-05, |
|
"loss": 0.5771, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 1.2544, |
|
"grad_norm": 0.5400407046866157, |
|
"learning_rate": 3.727558275050301e-05, |
|
"loss": 0.5747, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 1.2671999999999999, |
|
"grad_norm": 0.8161506433842548, |
|
"learning_rate": 3.718469505124434e-05, |
|
"loss": 0.5766, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.5058811630616397, |
|
"learning_rate": 3.709243069912041e-05, |
|
"loss": 0.5837, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 1.2928, |
|
"grad_norm": 0.7431695528629912, |
|
"learning_rate": 3.699879708535838e-05, |
|
"loss": 0.5723, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 1.3056, |
|
"grad_norm": 0.6643391693290811, |
|
"learning_rate": 3.69038017108759e-05, |
|
"loss": 0.5751, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 1.3184, |
|
"grad_norm": 0.6011164147503848, |
|
"learning_rate": 3.680745218568026e-05, |
|
"loss": 0.5876, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 1.3312, |
|
"grad_norm": 0.4730209286365697, |
|
"learning_rate": 3.6709756228258735e-05, |
|
"loss": 0.595, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 1.3439999999999999, |
|
"grad_norm": 0.5841846391799305, |
|
"learning_rate": 3.6610721664960236e-05, |
|
"loss": 0.6005, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 1.3568, |
|
"grad_norm": 0.6632343206084076, |
|
"learning_rate": 3.65103564293684e-05, |
|
"loss": 0.5804, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 1.3696, |
|
"grad_norm": 0.6630054629726436, |
|
"learning_rate": 3.640866856166601e-05, |
|
"loss": 0.5769, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 1.3824, |
|
"grad_norm": 0.598020909384883, |
|
"learning_rate": 3.6305666207990886e-05, |
|
"loss": 0.588, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 1.3952, |
|
"grad_norm": 0.48985433484279217, |
|
"learning_rate": 3.6201357619783336e-05, |
|
"loss": 0.57, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 1.408, |
|
"grad_norm": 0.3863983254888785, |
|
"learning_rate": 3.609575115312511e-05, |
|
"loss": 0.5729, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 1.4208, |
|
"grad_norm": 0.4740187512818743, |
|
"learning_rate": 3.598885526807003e-05, |
|
"loss": 0.5793, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 1.4336, |
|
"grad_norm": 0.42622525168734493, |
|
"learning_rate": 3.5880678527966224e-05, |
|
"loss": 0.5841, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 1.4464000000000001, |
|
"grad_norm": 0.39643940618622864, |
|
"learning_rate": 3.577122959877017e-05, |
|
"loss": 0.5861, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 1.4592, |
|
"grad_norm": 0.4242528887242483, |
|
"learning_rate": 3.566051724835245e-05, |
|
"loss": 0.5834, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 1.472, |
|
"grad_norm": 0.5421500517599109, |
|
"learning_rate": 3.554855034579532e-05, |
|
"loss": 0.582, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 1.4848, |
|
"grad_norm": 0.5311832845228792, |
|
"learning_rate": 3.5435337860682304e-05, |
|
"loss": 0.572, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 1.4976, |
|
"grad_norm": 0.4662212069404133, |
|
"learning_rate": 3.532088886237956e-05, |
|
"loss": 0.5783, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 1.5104, |
|
"grad_norm": 0.45833419968026556, |
|
"learning_rate": 3.520521251930941e-05, |
|
"loss": 0.5642, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 1.5232, |
|
"grad_norm": 0.4497591263859104, |
|
"learning_rate": 3.5088318098215805e-05, |
|
"loss": 0.5859, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 1.536, |
|
"grad_norm": 0.6258514869389551, |
|
"learning_rate": 3.497021496342203e-05, |
|
"loss": 0.5711, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 1.5488, |
|
"grad_norm": 0.5686703454809979, |
|
"learning_rate": 3.485091257608047e-05, |
|
"loss": 0.5841, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 1.5615999999999999, |
|
"grad_norm": 0.3681018500089249, |
|
"learning_rate": 3.473042049341474e-05, |
|
"loss": 0.584, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 1.5744, |
|
"grad_norm": 0.42669869438547914, |
|
"learning_rate": 3.4608748367954064e-05, |
|
"loss": 0.5868, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 1.5872000000000002, |
|
"grad_norm": 0.5441247186904558, |
|
"learning_rate": 3.4485905946759965e-05, |
|
"loss": 0.5764, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 0.501140527539522, |
|
"learning_rate": 3.4361903070645484e-05, |
|
"loss": 0.5794, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 1.6128, |
|
"grad_norm": 0.509604255782758, |
|
"learning_rate": 3.423674967338681e-05, |
|
"loss": 0.5887, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 1.6256, |
|
"grad_norm": 0.468819660502078, |
|
"learning_rate": 3.411045578092754e-05, |
|
"loss": 0.5708, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 1.6383999999999999, |
|
"grad_norm": 0.5681474181696031, |
|
"learning_rate": 3.398303151057543e-05, |
|
"loss": 0.5764, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 1.6512, |
|
"grad_norm": 0.48439243320979486, |
|
"learning_rate": 3.385448707019199e-05, |
|
"loss": 0.5872, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 1.6640000000000001, |
|
"grad_norm": 0.5120908720823386, |
|
"learning_rate": 3.372483275737468e-05, |
|
"loss": 0.574, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.6768, |
|
"grad_norm": 0.6349706050854664, |
|
"learning_rate": 3.359407895863199e-05, |
|
"loss": 0.5654, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 1.6896, |
|
"grad_norm": 0.5797502972193269, |
|
"learning_rate": 3.34622361485514e-05, |
|
"loss": 0.5671, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 1.7024, |
|
"grad_norm": 0.47115877117844046, |
|
"learning_rate": 3.332931488896029e-05, |
|
"loss": 0.5808, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 1.7151999999999998, |
|
"grad_norm": 0.5315156219737786, |
|
"learning_rate": 3.319532582807977e-05, |
|
"loss": 0.5736, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 1.728, |
|
"grad_norm": 0.606597702185915, |
|
"learning_rate": 3.30602796996717e-05, |
|
"loss": 0.5737, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 1.7408000000000001, |
|
"grad_norm": 0.6363278961587933, |
|
"learning_rate": 3.2924187322178865e-05, |
|
"loss": 0.5778, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 1.7536, |
|
"grad_norm": 0.37272326213256113, |
|
"learning_rate": 3.278705959785821e-05, |
|
"loss": 0.5686, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 1.7664, |
|
"grad_norm": 0.5391746666140868, |
|
"learning_rate": 3.2648907511907544e-05, |
|
"loss": 0.5716, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 1.7792, |
|
"grad_norm": 0.4933426735591773, |
|
"learning_rate": 3.250974213158555e-05, |
|
"loss": 0.5786, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 1.792, |
|
"grad_norm": 0.47449680125291605, |
|
"learning_rate": 3.23695746053251e-05, |
|
"loss": 0.5663, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.8048, |
|
"grad_norm": 0.46684001938007624, |
|
"learning_rate": 3.222841616184025e-05, |
|
"loss": 0.5657, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 1.8176, |
|
"grad_norm": 0.47569872068326147, |
|
"learning_rate": 3.208627810922665e-05, |
|
"loss": 0.5747, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 1.8304, |
|
"grad_norm": 0.5626877646957587, |
|
"learning_rate": 3.194317183405573e-05, |
|
"loss": 0.5703, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 1.8432, |
|
"grad_norm": 0.5005964045377986, |
|
"learning_rate": 3.1799108800462466e-05, |
|
"loss": 0.5702, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 1.8559999999999999, |
|
"grad_norm": 0.4728803164820471, |
|
"learning_rate": 3.1654100549227024e-05, |
|
"loss": 0.5701, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 1.8688, |
|
"grad_norm": 0.6279759957965352, |
|
"learning_rate": 3.1508158696850275e-05, |
|
"loss": 0.5687, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 1.8816000000000002, |
|
"grad_norm": 0.4414026788106849, |
|
"learning_rate": 3.136129493462312e-05, |
|
"loss": 0.5709, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 1.8944, |
|
"grad_norm": 0.39742569125698024, |
|
"learning_rate": 3.121352102768998e-05, |
|
"loss": 0.5677, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.9072, |
|
"grad_norm": 0.46569036957339405, |
|
"learning_rate": 3.106484881410628e-05, |
|
"loss": 0.5757, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 0.4334250181502594, |
|
"learning_rate": 3.091529020389009e-05, |
|
"loss": 0.5675, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.9327999999999999, |
|
"grad_norm": 0.4204930094090842, |
|
"learning_rate": 3.076485717806808e-05, |
|
"loss": 0.5687, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 1.9456, |
|
"grad_norm": 0.4987314412738242, |
|
"learning_rate": 3.061356178771564e-05, |
|
"loss": 0.571, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.9584000000000001, |
|
"grad_norm": 0.46940615574832384, |
|
"learning_rate": 3.0461416152991555e-05, |
|
"loss": 0.5691, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 1.9712, |
|
"grad_norm": 0.5577877039238854, |
|
"learning_rate": 3.0308432462167045e-05, |
|
"loss": 0.5581, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 1.984, |
|
"grad_norm": 0.42448538136278574, |
|
"learning_rate": 3.015462297064936e-05, |
|
"loss": 0.5789, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 1.9968, |
|
"grad_norm": 0.6763612785070511, |
|
"learning_rate": 3.0000000000000004e-05, |
|
"loss": 0.5741, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 2.0096, |
|
"grad_norm": 0.5722503174192143, |
|
"learning_rate": 2.98445759369477e-05, |
|
"loss": 0.5143, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 2.0224, |
|
"grad_norm": 0.5026703958168597, |
|
"learning_rate": 2.9688363232396056e-05, |
|
"loss": 0.5223, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 2.0352, |
|
"grad_norm": 0.8492116119835177, |
|
"learning_rate": 2.9531374400426158e-05, |
|
"loss": 0.5036, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 2.048, |
|
"grad_norm": 0.913996520180068, |
|
"learning_rate": 2.9373622017294075e-05, |
|
"loss": 0.5197, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 2.0608, |
|
"grad_norm": 0.578935444154028, |
|
"learning_rate": 2.9215118720423375e-05, |
|
"loss": 0.4952, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 2.0736, |
|
"grad_norm": 0.5860904629062806, |
|
"learning_rate": 2.9055877207392752e-05, |
|
"loss": 0.5, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 2.0864, |
|
"grad_norm": 0.8044025573168775, |
|
"learning_rate": 2.8895910234918828e-05, |
|
"loss": 0.4917, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 2.0992, |
|
"grad_norm": 0.5906320253924908, |
|
"learning_rate": 2.873523061783426e-05, |
|
"loss": 0.5018, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 2.112, |
|
"grad_norm": 0.6498325035791859, |
|
"learning_rate": 2.8573851228061084e-05, |
|
"loss": 0.4965, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 2.1248, |
|
"grad_norm": 0.628304699648312, |
|
"learning_rate": 2.8411784993579633e-05, |
|
"loss": 0.4865, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 2.1376, |
|
"grad_norm": 0.5998061185741181, |
|
"learning_rate": 2.8249044897392814e-05, |
|
"loss": 0.491, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 2.1504, |
|
"grad_norm": 0.5981618933467712, |
|
"learning_rate": 2.80856439764861e-05, |
|
"loss": 0.5045, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 2.1632, |
|
"grad_norm": 0.4744854867041487, |
|
"learning_rate": 2.792159532078314e-05, |
|
"loss": 0.4894, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 2.176, |
|
"grad_norm": 0.49326360205323266, |
|
"learning_rate": 2.77569120720971e-05, |
|
"loss": 0.4894, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 2.1888, |
|
"grad_norm": 0.44948652290190905, |
|
"learning_rate": 2.7591607423077932e-05, |
|
"loss": 0.4981, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 2.2016, |
|
"grad_norm": 0.5056694712408589, |
|
"learning_rate": 2.7425694616155474e-05, |
|
"loss": 0.4947, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 2.2144, |
|
"grad_norm": 0.5313951384739188, |
|
"learning_rate": 2.7259186942478656e-05, |
|
"loss": 0.4947, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 2.2272, |
|
"grad_norm": 0.40427169688811354, |
|
"learning_rate": 2.7092097740850712e-05, |
|
"loss": 0.5054, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 0.5812741282451808, |
|
"learning_rate": 2.692444039666066e-05, |
|
"loss": 0.4981, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 2.2528, |
|
"grad_norm": 0.4295515657065, |
|
"learning_rate": 2.6756228340810946e-05, |
|
"loss": 0.5049, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 2.2656, |
|
"grad_norm": 0.42951431128473816, |
|
"learning_rate": 2.6587475048641596e-05, |
|
"loss": 0.4896, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 2.2784, |
|
"grad_norm": 0.5201501793555885, |
|
"learning_rate": 2.6418194038850634e-05, |
|
"loss": 0.494, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 2.2912, |
|
"grad_norm": 0.4290076152050141, |
|
"learning_rate": 2.624839887241115e-05, |
|
"loss": 0.4951, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 2.304, |
|
"grad_norm": 0.41103474663991013, |
|
"learning_rate": 2.607810315148494e-05, |
|
"loss": 0.4951, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 2.3168, |
|
"grad_norm": 0.3732307708169955, |
|
"learning_rate": 2.5907320518332827e-05, |
|
"loss": 0.4957, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 2.3296, |
|
"grad_norm": 0.43299916544822603, |
|
"learning_rate": 2.5736064654221808e-05, |
|
"loss": 0.4962, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 2.3424, |
|
"grad_norm": 0.36682921850650274, |
|
"learning_rate": 2.5564349278329056e-05, |
|
"loss": 0.4999, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 2.3552, |
|
"grad_norm": 0.3507507395182079, |
|
"learning_rate": 2.539218814664288e-05, |
|
"loss": 0.4897, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 2.368, |
|
"grad_norm": 0.4403078896816384, |
|
"learning_rate": 2.521959505086075e-05, |
|
"loss": 0.4942, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 2.3808, |
|
"grad_norm": 0.3760556219110924, |
|
"learning_rate": 2.5046583817284437e-05, |
|
"loss": 0.5047, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 2.3936, |
|
"grad_norm": 0.32749741599683097, |
|
"learning_rate": 2.487316830571244e-05, |
|
"loss": 0.4835, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 2.4064, |
|
"grad_norm": 0.3903898162096965, |
|
"learning_rate": 2.4699362408329646e-05, |
|
"loss": 0.5067, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 2.4192, |
|
"grad_norm": 0.38438738834323594, |
|
"learning_rate": 2.4525180048594452e-05, |
|
"loss": 0.4849, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 2.432, |
|
"grad_norm": 0.402863434106592, |
|
"learning_rate": 2.435063518012335e-05, |
|
"loss": 0.5002, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 2.4448, |
|
"grad_norm": 0.39000613279216245, |
|
"learning_rate": 2.4175741785573177e-05, |
|
"loss": 0.5136, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 2.4576000000000002, |
|
"grad_norm": 0.4419462350894651, |
|
"learning_rate": 2.4000513875520892e-05, |
|
"loss": 0.4922, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 2.4704, |
|
"grad_norm": 0.3606703689703085, |
|
"learning_rate": 2.3824965487341247e-05, |
|
"loss": 0.5045, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 2.4832, |
|
"grad_norm": 0.39658921683014065, |
|
"learning_rate": 2.3649110684082258e-05, |
|
"loss": 0.4992, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 2.496, |
|
"grad_norm": 0.37008936834657263, |
|
"learning_rate": 2.3472963553338614e-05, |
|
"loss": 0.5001, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 2.5088, |
|
"grad_norm": 0.38277633581754456, |
|
"learning_rate": 2.3296538206123134e-05, |
|
"loss": 0.4881, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 2.5216, |
|
"grad_norm": 0.34780459495046656, |
|
"learning_rate": 2.311984877573636e-05, |
|
"loss": 0.489, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 2.5343999999999998, |
|
"grad_norm": 0.42528032608133876, |
|
"learning_rate": 2.2942909416634326e-05, |
|
"loss": 0.4972, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 2.5472, |
|
"grad_norm": 0.3298875279115166, |
|
"learning_rate": 2.2765734303294666e-05, |
|
"loss": 0.4925, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 0.40522458502738923, |
|
"learning_rate": 2.2588337629081107e-05, |
|
"loss": 0.4987, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 2.5728, |
|
"grad_norm": 0.28890557367196196, |
|
"learning_rate": 2.2410733605106462e-05, |
|
"loss": 0.4828, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 2.5856, |
|
"grad_norm": 0.35752143768891786, |
|
"learning_rate": 2.2232936459094158e-05, |
|
"loss": 0.495, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 2.5984, |
|
"grad_norm": 0.2847955714786017, |
|
"learning_rate": 2.205496043423849e-05, |
|
"loss": 0.4923, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 2.6112, |
|
"grad_norm": 0.34462734916524024, |
|
"learning_rate": 2.1876819788063586e-05, |
|
"loss": 0.4919, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 2.624, |
|
"grad_norm": 0.28997561583741843, |
|
"learning_rate": 2.16985287912813e-05, |
|
"loss": 0.4876, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 2.6368, |
|
"grad_norm": 0.32736744376129595, |
|
"learning_rate": 2.1520101726647922e-05, |
|
"loss": 0.4889, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 2.6496, |
|
"grad_norm": 0.29552255599003757, |
|
"learning_rate": 2.1341552887820048e-05, |
|
"loss": 0.495, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 2.6624, |
|
"grad_norm": 0.2986535725397418, |
|
"learning_rate": 2.1162896578209517e-05, |
|
"loss": 0.4953, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 2.6752000000000002, |
|
"grad_norm": 0.32346026994057464, |
|
"learning_rate": 2.0984147109837564e-05, |
|
"loss": 0.4862, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 2.6879999999999997, |
|
"grad_norm": 0.3248091215402218, |
|
"learning_rate": 2.0805318802188307e-05, |
|
"loss": 0.4822, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 2.7008, |
|
"grad_norm": 0.3069758746580514, |
|
"learning_rate": 2.0626425981061608e-05, |
|
"loss": 0.5123, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 2.7136, |
|
"grad_norm": 0.3763288654288441, |
|
"learning_rate": 2.0447482977425465e-05, |
|
"loss": 0.4957, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 2.7264, |
|
"grad_norm": 0.3201346373064764, |
|
"learning_rate": 2.0268504126267952e-05, |
|
"loss": 0.4953, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 2.7392, |
|
"grad_norm": 0.39729074863968356, |
|
"learning_rate": 2.008950376544887e-05, |
|
"loss": 0.499, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 2.752, |
|
"grad_norm": 0.3304189063909658, |
|
"learning_rate": 1.9910496234551132e-05, |
|
"loss": 0.4959, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 2.7648, |
|
"grad_norm": 0.3430447918196304, |
|
"learning_rate": 1.9731495873732055e-05, |
|
"loss": 0.4934, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 2.7776, |
|
"grad_norm": 0.34316923398695875, |
|
"learning_rate": 1.9552517022574542e-05, |
|
"loss": 0.5064, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 2.7904, |
|
"grad_norm": 0.3481950457193192, |
|
"learning_rate": 1.93735740189384e-05, |
|
"loss": 0.4957, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 2.8032, |
|
"grad_norm": 0.34710786210143224, |
|
"learning_rate": 1.9194681197811703e-05, |
|
"loss": 0.4884, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 2.816, |
|
"grad_norm": 0.3347710607736284, |
|
"learning_rate": 1.901585289016244e-05, |
|
"loss": 0.4891, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 2.8288, |
|
"grad_norm": 0.3343701183653994, |
|
"learning_rate": 1.8837103421790486e-05, |
|
"loss": 0.4981, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 2.8416, |
|
"grad_norm": 0.3521372001758378, |
|
"learning_rate": 1.8658447112179952e-05, |
|
"loss": 0.5067, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 2.8544, |
|
"grad_norm": 0.3041068550935644, |
|
"learning_rate": 1.8479898273352084e-05, |
|
"loss": 0.4868, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 2.8672, |
|
"grad_norm": 0.3210055574329485, |
|
"learning_rate": 1.83014712087187e-05, |
|
"loss": 0.4953, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"grad_norm": 0.27212342350119617, |
|
"learning_rate": 1.8123180211936417e-05, |
|
"loss": 0.4914, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 2.8928000000000003, |
|
"grad_norm": 0.2988079253064433, |
|
"learning_rate": 1.794503956576152e-05, |
|
"loss": 0.5055, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 2.9055999999999997, |
|
"grad_norm": 0.3249357836053954, |
|
"learning_rate": 1.776706354090585e-05, |
|
"loss": 0.4963, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 2.9184, |
|
"grad_norm": 0.30877687777261964, |
|
"learning_rate": 1.758926639489354e-05, |
|
"loss": 0.5087, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 2.9312, |
|
"grad_norm": 0.34825679402688037, |
|
"learning_rate": 1.7411662370918893e-05, |
|
"loss": 0.4852, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 2.944, |
|
"grad_norm": 0.3122362652270399, |
|
"learning_rate": 1.7234265696705344e-05, |
|
"loss": 0.4927, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 2.9568, |
|
"grad_norm": 0.3555980989681846, |
|
"learning_rate": 1.7057090583365678e-05, |
|
"loss": 0.4993, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 2.9696, |
|
"grad_norm": 0.2890714695184332, |
|
"learning_rate": 1.6880151224263646e-05, |
|
"loss": 0.4791, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 2.9824, |
|
"grad_norm": 0.3340506927436075, |
|
"learning_rate": 1.6703461793876876e-05, |
|
"loss": 0.5169, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 2.9952, |
|
"grad_norm": 0.31408243123886154, |
|
"learning_rate": 1.6527036446661396e-05, |
|
"loss": 0.4956, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 3.008, |
|
"grad_norm": 0.40654585096835055, |
|
"learning_rate": 1.635088931591775e-05, |
|
"loss": 0.4588, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 3.0208, |
|
"grad_norm": 0.419884900413749, |
|
"learning_rate": 1.6175034512658753e-05, |
|
"loss": 0.4254, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 3.0336, |
|
"grad_norm": 0.6008062571027746, |
|
"learning_rate": 1.5999486124479115e-05, |
|
"loss": 0.433, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 3.0464, |
|
"grad_norm": 0.4465527398249857, |
|
"learning_rate": 1.5824258214426833e-05, |
|
"loss": 0.444, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 3.0592, |
|
"grad_norm": 0.4193803025980898, |
|
"learning_rate": 1.5649364819876655e-05, |
|
"loss": 0.4313, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 3.072, |
|
"grad_norm": 0.4125516108235289, |
|
"learning_rate": 1.547481995140556e-05, |
|
"loss": 0.4103, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 3.0848, |
|
"grad_norm": 0.41021467807578205, |
|
"learning_rate": 1.5300637591670357e-05, |
|
"loss": 0.4212, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 3.0976, |
|
"grad_norm": 0.3790875492885813, |
|
"learning_rate": 1.5126831694287564e-05, |
|
"loss": 0.4334, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 3.1104, |
|
"grad_norm": 0.42829444758072965, |
|
"learning_rate": 1.4953416182715566e-05, |
|
"loss": 0.4269, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 3.1232, |
|
"grad_norm": 0.3544697233015643, |
|
"learning_rate": 1.478040494913926e-05, |
|
"loss": 0.4277, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 3.136, |
|
"grad_norm": 0.4040035567489867, |
|
"learning_rate": 1.460781185335713e-05, |
|
"loss": 0.4312, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 3.1488, |
|
"grad_norm": 0.3438830200025999, |
|
"learning_rate": 1.443565072167095e-05, |
|
"loss": 0.4408, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 3.1616, |
|
"grad_norm": 0.3457383948694485, |
|
"learning_rate": 1.4263935345778202e-05, |
|
"loss": 0.4318, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 3.1744, |
|
"grad_norm": 0.3209468399576312, |
|
"learning_rate": 1.409267948166718e-05, |
|
"loss": 0.4282, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 3.1872, |
|
"grad_norm": 0.31904439275611846, |
|
"learning_rate": 1.3921896848515064e-05, |
|
"loss": 0.435, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"grad_norm": 0.3247930622800883, |
|
"learning_rate": 1.3751601127588849e-05, |
|
"loss": 0.4155, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 3.2128, |
|
"grad_norm": 0.2873541554352374, |
|
"learning_rate": 1.3581805961149371e-05, |
|
"loss": 0.4364, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 3.2256, |
|
"grad_norm": 0.34081150707234453, |
|
"learning_rate": 1.341252495135841e-05, |
|
"loss": 0.421, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 3.2384, |
|
"grad_norm": 0.2948836577244133, |
|
"learning_rate": 1.324377165918906e-05, |
|
"loss": 0.4368, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 3.2512, |
|
"grad_norm": 0.3162585544662662, |
|
"learning_rate": 1.3075559603339354e-05, |
|
"loss": 0.4278, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 3.2640000000000002, |
|
"grad_norm": 0.3314459330554049, |
|
"learning_rate": 1.2907902259149287e-05, |
|
"loss": 0.427, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 3.2768, |
|
"grad_norm": 0.2776568172512342, |
|
"learning_rate": 1.274081305752135e-05, |
|
"loss": 0.4361, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 3.2896, |
|
"grad_norm": 0.32208249694143454, |
|
"learning_rate": 1.2574305383844528e-05, |
|
"loss": 0.4203, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 3.3024, |
|
"grad_norm": 0.3027462367980654, |
|
"learning_rate": 1.2408392576922075e-05, |
|
"loss": 0.4248, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 3.3152, |
|
"grad_norm": 0.28819685100487374, |
|
"learning_rate": 1.2243087927902905e-05, |
|
"loss": 0.4288, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 3.328, |
|
"grad_norm": 0.2980531747486014, |
|
"learning_rate": 1.2078404679216864e-05, |
|
"loss": 0.4232, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 3.3407999999999998, |
|
"grad_norm": 0.2890118308205293, |
|
"learning_rate": 1.1914356023513904e-05, |
|
"loss": 0.4139, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 3.3536, |
|
"grad_norm": 0.26366169093621045, |
|
"learning_rate": 1.1750955102607193e-05, |
|
"loss": 0.4319, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 3.3664, |
|
"grad_norm": 0.2728501624545788, |
|
"learning_rate": 1.1588215006420374e-05, |
|
"loss": 0.4193, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 3.3792, |
|
"grad_norm": 0.29681862061736447, |
|
"learning_rate": 1.1426148771938915e-05, |
|
"loss": 0.426, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 3.392, |
|
"grad_norm": 0.2544508537998433, |
|
"learning_rate": 1.1264769382165748e-05, |
|
"loss": 0.4187, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 3.4048, |
|
"grad_norm": 0.3207370911151253, |
|
"learning_rate": 1.110408976508118e-05, |
|
"loss": 0.4184, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 3.4176, |
|
"grad_norm": 0.2649553530642913, |
|
"learning_rate": 1.094412279260726e-05, |
|
"loss": 0.4232, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 3.4304, |
|
"grad_norm": 0.2575834273446465, |
|
"learning_rate": 1.0784881279576635e-05, |
|
"loss": 0.4211, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 3.4432, |
|
"grad_norm": 0.30780701681300127, |
|
"learning_rate": 1.0626377982705929e-05, |
|
"loss": 0.4244, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 3.456, |
|
"grad_norm": 0.28170446467321153, |
|
"learning_rate": 1.0468625599573842e-05, |
|
"loss": 0.4199, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 3.4688, |
|
"grad_norm": 0.2637541363350921, |
|
"learning_rate": 1.0311636767603952e-05, |
|
"loss": 0.4214, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 3.4816, |
|
"grad_norm": 0.27966307752352326, |
|
"learning_rate": 1.0155424063052306e-05, |
|
"loss": 0.4319, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 3.4944, |
|
"grad_norm": 0.2689145271383354, |
|
"learning_rate": 1.0000000000000006e-05, |
|
"loss": 0.4294, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 3.5072, |
|
"grad_norm": 0.26132213013856004, |
|
"learning_rate": 9.84537702935065e-06, |
|
"loss": 0.4279, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"grad_norm": 0.4649247492039518, |
|
"learning_rate": 9.691567537832964e-06, |
|
"loss": 0.4163, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 3.5328, |
|
"grad_norm": 0.25611463232583487, |
|
"learning_rate": 9.538583847008452e-06, |
|
"loss": 0.4316, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 3.5456, |
|
"grad_norm": 0.2566981779171312, |
|
"learning_rate": 9.386438212284372e-06, |
|
"loss": 0.4159, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 3.5584, |
|
"grad_norm": 0.24958050574233248, |
|
"learning_rate": 9.235142821931928e-06, |
|
"loss": 0.4287, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 3.5712, |
|
"grad_norm": 0.24836265500751314, |
|
"learning_rate": 9.084709796109907e-06, |
|
"loss": 0.4275, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 3.584, |
|
"grad_norm": 0.24692726709965532, |
|
"learning_rate": 8.93515118589373e-06, |
|
"loss": 0.4267, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 3.5968, |
|
"grad_norm": 0.24609086568215463, |
|
"learning_rate": 8.786478972310023e-06, |
|
"loss": 0.4092, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 3.6096, |
|
"grad_norm": 0.23954856666870555, |
|
"learning_rate": 8.638705065376887e-06, |
|
"loss": 0.4195, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 3.6224, |
|
"grad_norm": 0.23364220624396864, |
|
"learning_rate": 8.491841303149728e-06, |
|
"loss": 0.4329, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 3.6352, |
|
"grad_norm": 0.22609657976453879, |
|
"learning_rate": 8.345899450772975e-06, |
|
"loss": 0.4365, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 3.648, |
|
"grad_norm": 0.2519088471123882, |
|
"learning_rate": 8.200891199537549e-06, |
|
"loss": 0.406, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 3.6608, |
|
"grad_norm": 0.24226192603330354, |
|
"learning_rate": 8.056828165944282e-06, |
|
"loss": 0.4294, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 3.6736, |
|
"grad_norm": 0.24703777332038798, |
|
"learning_rate": 7.913721890773354e-06, |
|
"loss": 0.4235, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 3.6864, |
|
"grad_norm": 0.2416819894481494, |
|
"learning_rate": 7.771583838159756e-06, |
|
"loss": 0.4236, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 3.6992000000000003, |
|
"grad_norm": 0.2437920000095238, |
|
"learning_rate": 7.630425394674903e-06, |
|
"loss": 0.419, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 3.7119999999999997, |
|
"grad_norm": 0.23327557264379006, |
|
"learning_rate": 7.49025786841445e-06, |
|
"loss": 0.4334, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 3.7248, |
|
"grad_norm": 0.2293395410726176, |
|
"learning_rate": 7.3510924880924575e-06, |
|
"loss": 0.4235, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 3.7376, |
|
"grad_norm": 0.22677999885899983, |
|
"learning_rate": 7.212940402141808e-06, |
|
"loss": 0.4176, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 3.7504, |
|
"grad_norm": 0.23165615642225584, |
|
"learning_rate": 7.075812677821145e-06, |
|
"loss": 0.4185, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 3.7632, |
|
"grad_norm": 0.25076188779206476, |
|
"learning_rate": 6.939720300328303e-06, |
|
"loss": 0.413, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 3.776, |
|
"grad_norm": 0.24230610135722463, |
|
"learning_rate": 6.8046741719202385e-06, |
|
"loss": 0.4195, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 3.7888, |
|
"grad_norm": 0.22966849116061802, |
|
"learning_rate": 6.67068511103971e-06, |
|
"loss": 0.4383, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 3.8016, |
|
"grad_norm": 0.237876486704717, |
|
"learning_rate": 6.537763851448593e-06, |
|
"loss": 0.4212, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 3.8144, |
|
"grad_norm": 0.23420288846813658, |
|
"learning_rate": 6.4059210413680175e-06, |
|
"loss": 0.424, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 3.8272, |
|
"grad_norm": 0.22855408286153545, |
|
"learning_rate": 6.275167242625331e-06, |
|
"loss": 0.4216, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"grad_norm": 0.22496842605104692, |
|
"learning_rate": 6.145512929808013e-06, |
|
"loss": 0.4268, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 3.8528000000000002, |
|
"grad_norm": 0.260580979935843, |
|
"learning_rate": 6.016968489424572e-06, |
|
"loss": 0.4362, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 3.8656, |
|
"grad_norm": 0.23515581942316058, |
|
"learning_rate": 5.889544219072465e-06, |
|
"loss": 0.4227, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 3.8784, |
|
"grad_norm": 0.2301821744711806, |
|
"learning_rate": 5.7632503266131925e-06, |
|
"loss": 0.4115, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 3.8912, |
|
"grad_norm": 0.24041427254233036, |
|
"learning_rate": 5.638096929354522e-06, |
|
"loss": 0.4163, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 3.904, |
|
"grad_norm": 0.22962672133758927, |
|
"learning_rate": 5.514094053240035e-06, |
|
"loss": 0.412, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 3.9168, |
|
"grad_norm": 0.2546876405009508, |
|
"learning_rate": 5.39125163204594e-06, |
|
"loss": 0.4453, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 3.9295999999999998, |
|
"grad_norm": 0.251874165605757, |
|
"learning_rate": 5.269579506585259e-06, |
|
"loss": 0.4226, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 3.9424, |
|
"grad_norm": 0.24762103003628663, |
|
"learning_rate": 5.149087423919541e-06, |
|
"loss": 0.4213, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 3.9552, |
|
"grad_norm": 0.23547656543525808, |
|
"learning_rate": 5.029785036577976e-06, |
|
"loss": 0.4213, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 3.968, |
|
"grad_norm": 0.22124698994060055, |
|
"learning_rate": 4.911681901784198e-06, |
|
"loss": 0.4322, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 3.9808, |
|
"grad_norm": 0.23151356501707346, |
|
"learning_rate": 4.794787480690597e-06, |
|
"loss": 0.4299, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 3.9936, |
|
"grad_norm": 0.2222851779764549, |
|
"learning_rate": 4.679111137620442e-06, |
|
"loss": 0.4267, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 4.0064, |
|
"grad_norm": 0.27313353289247716, |
|
"learning_rate": 4.5646621393177e-06, |
|
"loss": 0.4036, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 4.0192, |
|
"grad_norm": 0.37647735983296937, |
|
"learning_rate": 4.451449654204685e-06, |
|
"loss": 0.3924, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 4.032, |
|
"grad_norm": 0.28037822372132515, |
|
"learning_rate": 4.339482751647557e-06, |
|
"loss": 0.3638, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 4.0448, |
|
"grad_norm": 0.3125956471742338, |
|
"learning_rate": 4.228770401229824e-06, |
|
"loss": 0.3823, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 4.0576, |
|
"grad_norm": 0.48895993038764357, |
|
"learning_rate": 4.119321472033779e-06, |
|
"loss": 0.3775, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 4.0704, |
|
"grad_norm": 0.3445166504068549, |
|
"learning_rate": 4.011144731929981e-06, |
|
"loss": 0.3813, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 4.0832, |
|
"grad_norm": 0.30571755702603826, |
|
"learning_rate": 3.904248846874894e-06, |
|
"loss": 0.3979, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 4.096, |
|
"grad_norm": 0.33530399695171575, |
|
"learning_rate": 3.7986423802166705e-06, |
|
"loss": 0.3839, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 4.1088, |
|
"grad_norm": 0.30971433570989015, |
|
"learning_rate": 3.694333792009115e-06, |
|
"loss": 0.3807, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 4.1216, |
|
"grad_norm": 0.31386159719572704, |
|
"learning_rate": 3.5913314383339937e-06, |
|
"loss": 0.3578, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 4.1344, |
|
"grad_norm": 0.26836719144538546, |
|
"learning_rate": 3.4896435706316e-06, |
|
"loss": 0.3817, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 4.1472, |
|
"grad_norm": 0.28412212263969033, |
|
"learning_rate": 3.3892783350397675e-06, |
|
"loss": 0.3889, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"grad_norm": 0.29762400347113643, |
|
"learning_rate": 3.290243771741275e-06, |
|
"loss": 0.3846, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 4.1728, |
|
"grad_norm": 0.2860278447290372, |
|
"learning_rate": 3.1925478143197418e-06, |
|
"loss": 0.3701, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 4.1856, |
|
"grad_norm": 0.2706472326402335, |
|
"learning_rate": 3.0961982891241083e-06, |
|
"loss": 0.3775, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 4.1984, |
|
"grad_norm": 0.26185114842096163, |
|
"learning_rate": 3.001202914641628e-06, |
|
"loss": 0.3694, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 4.2112, |
|
"grad_norm": 0.2612562139510835, |
|
"learning_rate": 2.907569300879596e-06, |
|
"loss": 0.3931, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 4.224, |
|
"grad_norm": 0.25162090008945814, |
|
"learning_rate": 2.815304948755664e-06, |
|
"loss": 0.3722, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 4.2368, |
|
"grad_norm": 0.23993277918222877, |
|
"learning_rate": 2.7244172494969978e-06, |
|
"loss": 0.3979, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 4.2496, |
|
"grad_norm": 0.23113009585064762, |
|
"learning_rate": 2.6349134840481294e-06, |
|
"loss": 0.3781, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 4.2624, |
|
"grad_norm": 0.2318792430421106, |
|
"learning_rate": 2.546800822487714e-06, |
|
"loss": 0.3693, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 4.2752, |
|
"grad_norm": 0.23481091374966023, |
|
"learning_rate": 2.4600863234541338e-06, |
|
"loss": 0.4017, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 4.288, |
|
"grad_norm": 0.2271355649495333, |
|
"learning_rate": 2.374776933580025e-06, |
|
"loss": 0.3788, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 4.3008, |
|
"grad_norm": 0.2242750207050269, |
|
"learning_rate": 2.2908794869358044e-06, |
|
"loss": 0.3809, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 4.3136, |
|
"grad_norm": 0.22750939685861177, |
|
"learning_rate": 2.2084007044821764e-06, |
|
"loss": 0.3826, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 4.3264, |
|
"grad_norm": 0.22032122669334825, |
|
"learning_rate": 2.127347193531757e-06, |
|
"loss": 0.3767, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 4.3392, |
|
"grad_norm": 0.21396305180442826, |
|
"learning_rate": 2.0477254472197237e-06, |
|
"loss": 0.3805, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 4.352, |
|
"grad_norm": 0.21808076268228624, |
|
"learning_rate": 1.96954184398368e-06, |
|
"loss": 0.3723, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 4.3648, |
|
"grad_norm": 0.22918310419902882, |
|
"learning_rate": 1.8928026470526917e-06, |
|
"loss": 0.3721, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 4.3776, |
|
"grad_norm": 0.21912269552519437, |
|
"learning_rate": 1.817514003945524e-06, |
|
"loss": 0.3674, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 4.3904, |
|
"grad_norm": 0.21061760499423257, |
|
"learning_rate": 1.743681945978184e-06, |
|
"loss": 0.3668, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 4.4032, |
|
"grad_norm": 0.2135093244604889, |
|
"learning_rate": 1.6713123877807413e-06, |
|
"loss": 0.3667, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 4.416, |
|
"grad_norm": 0.20676744028641914, |
|
"learning_rate": 1.6004111268235156e-06, |
|
"loss": 0.3856, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 4.4288, |
|
"grad_norm": 0.21342342355138336, |
|
"learning_rate": 1.5309838429526714e-06, |
|
"loss": 0.4001, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 4.4416, |
|
"grad_norm": 0.21295302256781662, |
|
"learning_rate": 1.4630360979351644e-06, |
|
"loss": 0.3966, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 4.4544, |
|
"grad_norm": 0.20877331416746198, |
|
"learning_rate": 1.396573335013236e-06, |
|
"loss": 0.3894, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 4.4672, |
|
"grad_norm": 0.21727350357530545, |
|
"learning_rate": 1.3316008784683265e-06, |
|
"loss": 0.3691, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"grad_norm": 0.21847302181712927, |
|
"learning_rate": 1.2681239331945695e-06, |
|
"loss": 0.3887, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 4.4928, |
|
"grad_norm": 0.212011319409588, |
|
"learning_rate": 1.2061475842818337e-06, |
|
"loss": 0.3905, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 4.5056, |
|
"grad_norm": 0.20316624291873847, |
|
"learning_rate": 1.1456767966083393e-06, |
|
"loss": 0.384, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 4.5184, |
|
"grad_norm": 0.20640523424617857, |
|
"learning_rate": 1.086716414442952e-06, |
|
"loss": 0.3797, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 4.5312, |
|
"grad_norm": 0.2054997141217928, |
|
"learning_rate": 1.0292711610570904e-06, |
|
"loss": 0.3737, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 4.5440000000000005, |
|
"grad_norm": 0.20057769739419745, |
|
"learning_rate": 9.733456383463658e-07, |
|
"loss": 0.3815, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 4.5568, |
|
"grad_norm": 0.20074679817279875, |
|
"learning_rate": 9.189443264619102e-07, |
|
"loss": 0.3779, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 4.5696, |
|
"grad_norm": 0.20327244059916494, |
|
"learning_rate": 8.660715834514977e-07, |
|
"loss": 0.3756, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 4.5824, |
|
"grad_norm": 0.2046938820000016, |
|
"learning_rate": 8.147316449103959e-07, |
|
"loss": 0.385, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 4.5952, |
|
"grad_norm": 0.20842525006937218, |
|
"learning_rate": 7.649286236420806e-07, |
|
"loss": 0.3787, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 4.608, |
|
"grad_norm": 0.19726586055435785, |
|
"learning_rate": 7.166665093287539e-07, |
|
"loss": 0.4023, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 4.6208, |
|
"grad_norm": 0.19706019306135975, |
|
"learning_rate": 6.69949168211721e-07, |
|
"loss": 0.3768, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 4.6336, |
|
"grad_norm": 0.1930540936571029, |
|
"learning_rate": 6.247803427816945e-07, |
|
"loss": 0.3682, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 4.6464, |
|
"grad_norm": 0.20579857751364183, |
|
"learning_rate": 5.811636514789598e-07, |
|
"loss": 0.3651, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 4.6592, |
|
"grad_norm": 0.20820600565350564, |
|
"learning_rate": 5.391025884035239e-07, |
|
"loss": 0.3761, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 4.672, |
|
"grad_norm": 0.20574772820004417, |
|
"learning_rate": 4.986005230351954e-07, |
|
"loss": 0.3818, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 4.6848, |
|
"grad_norm": 0.202604848883481, |
|
"learning_rate": 4.5966069996365993e-07, |
|
"loss": 0.3799, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 4.6975999999999996, |
|
"grad_norm": 0.1944897386871682, |
|
"learning_rate": 4.22286238628562e-07, |
|
"loss": 0.3902, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 4.7104, |
|
"grad_norm": 0.1994640180714002, |
|
"learning_rate": 3.8648013306960664e-07, |
|
"loss": 0.3741, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 4.7232, |
|
"grad_norm": 0.2040530220053012, |
|
"learning_rate": 3.522452516867048e-07, |
|
"loss": 0.3865, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 4.736, |
|
"grad_norm": 0.20017182295693167, |
|
"learning_rate": 3.1958433701019697e-07, |
|
"loss": 0.3904, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 4.7488, |
|
"grad_norm": 0.20525505206304456, |
|
"learning_rate": 2.8850000548115155e-07, |
|
"loss": 0.3801, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 4.7616, |
|
"grad_norm": 0.20293579102879616, |
|
"learning_rate": 2.5899474724174313e-07, |
|
"loss": 0.3825, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 4.7744, |
|
"grad_norm": 0.19636267825368714, |
|
"learning_rate": 2.3107092593579905e-07, |
|
"loss": 0.378, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 4.7872, |
|
"grad_norm": 0.19655957456657486, |
|
"learning_rate": 2.0473077851942858e-07, |
|
"loss": 0.3907, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"grad_norm": 0.2063623796074329, |
|
"learning_rate": 1.799764150818306e-07, |
|
"loss": 0.3663, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 4.8128, |
|
"grad_norm": 0.19683990366572185, |
|
"learning_rate": 1.5680981867625566e-07, |
|
"loss": 0.3912, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 4.8256, |
|
"grad_norm": 0.19932367917369181, |
|
"learning_rate": 1.3523284516113955e-07, |
|
"loss": 0.3879, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 4.8384, |
|
"grad_norm": 0.19792117174200943, |
|
"learning_rate": 1.1524722305144231e-07, |
|
"loss": 0.3678, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 4.8512, |
|
"grad_norm": 0.20001620409625334, |
|
"learning_rate": 9.685455338016347e-08, |
|
"loss": 0.3746, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 4.864, |
|
"grad_norm": 0.1990001961157355, |
|
"learning_rate": 8.005630957010014e-08, |
|
"loss": 0.3888, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 4.8768, |
|
"grad_norm": 0.20240824252767245, |
|
"learning_rate": 6.485383731580142e-08, |
|
"loss": 0.3595, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 4.8896, |
|
"grad_norm": 0.19804532092587782, |
|
"learning_rate": 5.1248354475768034e-08, |
|
"loss": 0.3877, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 4.9024, |
|
"grad_norm": 0.20626940327483265, |
|
"learning_rate": 3.924095097489922e-08, |
|
"loss": 0.3837, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 4.9152000000000005, |
|
"grad_norm": 0.19646889767438333, |
|
"learning_rate": 2.8832588717164766e-08, |
|
"loss": 0.3817, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 4.928, |
|
"grad_norm": 0.19958394868575255, |
|
"learning_rate": 2.0024101508555604e-08, |
|
"loss": 0.3791, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 4.9408, |
|
"grad_norm": 0.20335317670077688, |
|
"learning_rate": 1.281619499029274e-08, |
|
"loss": 0.3868, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 4.9536, |
|
"grad_norm": 0.2031273073431524, |
|
"learning_rate": 7.209446582292501e-09, |
|
"loss": 0.3642, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 4.9664, |
|
"grad_norm": 0.19642636644665254, |
|
"learning_rate": 3.2043054369057523e-09, |
|
"loss": 0.372, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 4.9792, |
|
"grad_norm": 0.2028644971237751, |
|
"learning_rate": 8.010924029533406e-10, |
|
"loss": 0.3661, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 4.992, |
|
"grad_norm": 0.2008515231696296, |
|
"learning_rate": 0.0, |
|
"loss": 0.3784, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 4.992, |
|
"step": 390, |
|
"total_flos": 2.1484573977508577e+18, |
|
"train_loss": 0.5224053398156777, |
|
"train_runtime": 63510.1122, |
|
"train_samples_per_second": 0.787, |
|
"train_steps_per_second": 0.006 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 390, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 5, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2.1484573977508577e+18, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |