|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 4.992, |
|
"eval_steps": 500, |
|
"global_step": 390, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0128, |
|
"grad_norm": 7.441595063490035, |
|
"learning_rate": 1.0256410256410257e-06, |
|
"loss": 1.12, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0256, |
|
"grad_norm": 7.484638177083275, |
|
"learning_rate": 2.0512820512820513e-06, |
|
"loss": 1.1255, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0384, |
|
"grad_norm": 7.202111396440288, |
|
"learning_rate": 3.0769230769230774e-06, |
|
"loss": 1.0896, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0512, |
|
"grad_norm": 6.452170654218077, |
|
"learning_rate": 4.102564102564103e-06, |
|
"loss": 1.0644, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.064, |
|
"grad_norm": 5.188815843122055, |
|
"learning_rate": 5.128205128205128e-06, |
|
"loss": 1.0445, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0768, |
|
"grad_norm": 3.247377461004119, |
|
"learning_rate": 6.153846153846155e-06, |
|
"loss": 0.997, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.0896, |
|
"grad_norm": 2.7719390197110103, |
|
"learning_rate": 7.17948717948718e-06, |
|
"loss": 0.9664, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.1024, |
|
"grad_norm": 4.4823818390219525, |
|
"learning_rate": 8.205128205128205e-06, |
|
"loss": 0.9717, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.1152, |
|
"grad_norm": 4.242204249260898, |
|
"learning_rate": 9.230769230769232e-06, |
|
"loss": 0.9553, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.128, |
|
"grad_norm": 4.255605010455584, |
|
"learning_rate": 1.0256410256410256e-05, |
|
"loss": 0.9199, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.1408, |
|
"grad_norm": 3.9237316733011736, |
|
"learning_rate": 1.1282051282051283e-05, |
|
"loss": 0.8879, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.1536, |
|
"grad_norm": 2.693596381156768, |
|
"learning_rate": 1.230769230769231e-05, |
|
"loss": 0.8633, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.1664, |
|
"grad_norm": 2.4889591499979207, |
|
"learning_rate": 1.3333333333333333e-05, |
|
"loss": 0.8242, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.1792, |
|
"grad_norm": 2.3543634039701615, |
|
"learning_rate": 1.435897435897436e-05, |
|
"loss": 0.8138, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.192, |
|
"grad_norm": 2.075920207094057, |
|
"learning_rate": 1.5384615384615387e-05, |
|
"loss": 0.8056, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.2048, |
|
"grad_norm": 1.7155500914814588, |
|
"learning_rate": 1.641025641025641e-05, |
|
"loss": 0.7874, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.2176, |
|
"grad_norm": 1.4914607967548483, |
|
"learning_rate": 1.7435897435897438e-05, |
|
"loss": 0.7711, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.2304, |
|
"grad_norm": 1.5009937594874494, |
|
"learning_rate": 1.8461538461538465e-05, |
|
"loss": 0.7573, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.2432, |
|
"grad_norm": 1.2398047186940726, |
|
"learning_rate": 1.9487179487179488e-05, |
|
"loss": 0.7462, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.256, |
|
"grad_norm": 1.1963702252810566, |
|
"learning_rate": 2.0512820512820512e-05, |
|
"loss": 0.7413, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.2688, |
|
"grad_norm": 1.3150779973188758, |
|
"learning_rate": 2.153846153846154e-05, |
|
"loss": 0.7376, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.2816, |
|
"grad_norm": 1.159843431165733, |
|
"learning_rate": 2.2564102564102566e-05, |
|
"loss": 0.7255, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.2944, |
|
"grad_norm": 1.2063555822294771, |
|
"learning_rate": 2.3589743589743593e-05, |
|
"loss": 0.7215, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.3072, |
|
"grad_norm": 1.1109265403583821, |
|
"learning_rate": 2.461538461538462e-05, |
|
"loss": 0.703, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.1720897213199182, |
|
"learning_rate": 2.5641025641025646e-05, |
|
"loss": 0.7012, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.3328, |
|
"grad_norm": 1.2825211703773385, |
|
"learning_rate": 2.6666666666666667e-05, |
|
"loss": 0.7153, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.3456, |
|
"grad_norm": 0.8308063777209478, |
|
"learning_rate": 2.7692307692307694e-05, |
|
"loss": 0.7024, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.3584, |
|
"grad_norm": 1.0281482576483572, |
|
"learning_rate": 2.871794871794872e-05, |
|
"loss": 0.6948, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.3712, |
|
"grad_norm": 0.8200341157177883, |
|
"learning_rate": 2.9743589743589747e-05, |
|
"loss": 0.701, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.384, |
|
"grad_norm": 1.060566942365839, |
|
"learning_rate": 3.0769230769230774e-05, |
|
"loss": 0.6869, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.3968, |
|
"grad_norm": 1.1242162874486763, |
|
"learning_rate": 3.1794871794871795e-05, |
|
"loss": 0.6801, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.4096, |
|
"grad_norm": 0.7775219332751483, |
|
"learning_rate": 3.282051282051282e-05, |
|
"loss": 0.6814, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.4224, |
|
"grad_norm": 1.2352462802201458, |
|
"learning_rate": 3.384615384615385e-05, |
|
"loss": 0.6894, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.4352, |
|
"grad_norm": 1.2277510115788453, |
|
"learning_rate": 3.4871794871794875e-05, |
|
"loss": 0.6977, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.448, |
|
"grad_norm": 0.9228331091805946, |
|
"learning_rate": 3.58974358974359e-05, |
|
"loss": 0.6591, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.4608, |
|
"grad_norm": 1.7174685276202803, |
|
"learning_rate": 3.692307692307693e-05, |
|
"loss": 0.679, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.4736, |
|
"grad_norm": 1.176892260603213, |
|
"learning_rate": 3.794871794871795e-05, |
|
"loss": 0.6894, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.4864, |
|
"grad_norm": 1.0278248870635087, |
|
"learning_rate": 3.8974358974358976e-05, |
|
"loss": 0.6669, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.4992, |
|
"grad_norm": 1.3605533899907176, |
|
"learning_rate": 4e-05, |
|
"loss": 0.6717, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.512, |
|
"grad_norm": 1.7285485484642533, |
|
"learning_rate": 3.9999198907597046e-05, |
|
"loss": 0.6762, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.5248, |
|
"grad_norm": 0.9941589848148505, |
|
"learning_rate": 3.9996795694563096e-05, |
|
"loss": 0.6515, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.5376, |
|
"grad_norm": 1.3925480817395375, |
|
"learning_rate": 3.999279055341771e-05, |
|
"loss": 0.6684, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.5504, |
|
"grad_norm": 1.3738521223323201, |
|
"learning_rate": 3.998718380500971e-05, |
|
"loss": 0.6763, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.5632, |
|
"grad_norm": 1.0279109549483008, |
|
"learning_rate": 3.997997589849145e-05, |
|
"loss": 0.6664, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.576, |
|
"grad_norm": 1.3481950601071695, |
|
"learning_rate": 3.9971167411282835e-05, |
|
"loss": 0.6638, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.5888, |
|
"grad_norm": 0.9119821810608826, |
|
"learning_rate": 3.99607590490251e-05, |
|
"loss": 0.6697, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.6016, |
|
"grad_norm": 0.9919432290349403, |
|
"learning_rate": 3.9948751645524235e-05, |
|
"loss": 0.6577, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.6144, |
|
"grad_norm": 1.2138896158517192, |
|
"learning_rate": 3.9935146162684206e-05, |
|
"loss": 0.6666, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.6272, |
|
"grad_norm": 1.2080681941689464, |
|
"learning_rate": 3.9919943690429906e-05, |
|
"loss": 0.6559, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 1.1239281784032824, |
|
"learning_rate": 3.9903145446619837e-05, |
|
"loss": 0.6605, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.6528, |
|
"grad_norm": 0.8166856534066423, |
|
"learning_rate": 3.9884752776948564e-05, |
|
"loss": 0.6521, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.6656, |
|
"grad_norm": 1.196527954842215, |
|
"learning_rate": 3.9864767154838864e-05, |
|
"loss": 0.6509, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.6784, |
|
"grad_norm": 0.7983369444352492, |
|
"learning_rate": 3.9843190181323744e-05, |
|
"loss": 0.6635, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.6912, |
|
"grad_norm": 0.9218480580978282, |
|
"learning_rate": 3.982002358491817e-05, |
|
"loss": 0.6476, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.704, |
|
"grad_norm": 0.7282292792713069, |
|
"learning_rate": 3.979526922148058e-05, |
|
"loss": 0.6559, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.7168, |
|
"grad_norm": 0.8484906554368669, |
|
"learning_rate": 3.9768929074064206e-05, |
|
"loss": 0.6494, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.7296, |
|
"grad_norm": 0.9186950061345318, |
|
"learning_rate": 3.9741005252758255e-05, |
|
"loss": 0.6521, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.7424, |
|
"grad_norm": 0.7725234236884608, |
|
"learning_rate": 3.971149999451886e-05, |
|
"loss": 0.65, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.7552, |
|
"grad_norm": 0.6135046864434999, |
|
"learning_rate": 3.9680415662989806e-05, |
|
"loss": 0.6526, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.768, |
|
"grad_norm": 1.0567785588566585, |
|
"learning_rate": 3.9647754748313294e-05, |
|
"loss": 0.6396, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.7808, |
|
"grad_norm": 0.7404946727222057, |
|
"learning_rate": 3.96135198669304e-05, |
|
"loss": 0.64, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.7936, |
|
"grad_norm": 0.6639952047181514, |
|
"learning_rate": 3.957771376137144e-05, |
|
"loss": 0.6573, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.8064, |
|
"grad_norm": 0.7816541565608149, |
|
"learning_rate": 3.954033930003634e-05, |
|
"loss": 0.6432, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.8192, |
|
"grad_norm": 0.8480913553934821, |
|
"learning_rate": 3.9501399476964806e-05, |
|
"loss": 0.6436, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.832, |
|
"grad_norm": 0.7674933034474372, |
|
"learning_rate": 3.946089741159648e-05, |
|
"loss": 0.6415, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.8448, |
|
"grad_norm": 0.6962553280648376, |
|
"learning_rate": 3.9418836348521045e-05, |
|
"loss": 0.6462, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.8576, |
|
"grad_norm": 0.754547010446202, |
|
"learning_rate": 3.937521965721831e-05, |
|
"loss": 0.6475, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.8704, |
|
"grad_norm": 0.6914569903553703, |
|
"learning_rate": 3.933005083178828e-05, |
|
"loss": 0.653, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.8832, |
|
"grad_norm": 0.7183778947689864, |
|
"learning_rate": 3.928333349067125e-05, |
|
"loss": 0.6359, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.896, |
|
"grad_norm": 0.6814811992435078, |
|
"learning_rate": 3.923507137635792e-05, |
|
"loss": 0.64, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.9088, |
|
"grad_norm": 0.7651047855634444, |
|
"learning_rate": 3.9185268355089606e-05, |
|
"loss": 0.6381, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.9216, |
|
"grad_norm": 0.7111755768601328, |
|
"learning_rate": 3.913392841654851e-05, |
|
"loss": 0.6334, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.9344, |
|
"grad_norm": 0.9250555747835286, |
|
"learning_rate": 3.9081055673538093e-05, |
|
"loss": 0.6218, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.9472, |
|
"grad_norm": 0.7731240952783506, |
|
"learning_rate": 3.902665436165364e-05, |
|
"loss": 0.6329, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.8881072363547095, |
|
"learning_rate": 3.897072883894291e-05, |
|
"loss": 0.628, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.9728, |
|
"grad_norm": 0.7660357253718983, |
|
"learning_rate": 3.8913283585557054e-05, |
|
"loss": 0.6406, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.9856, |
|
"grad_norm": 0.9274912155298295, |
|
"learning_rate": 3.885432320339167e-05, |
|
"loss": 0.6324, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.9984, |
|
"grad_norm": 1.230886069065375, |
|
"learning_rate": 3.879385241571817e-05, |
|
"loss": 0.6385, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 1.0112, |
|
"grad_norm": 0.6536507394497568, |
|
"learning_rate": 3.873187606680543e-05, |
|
"loss": 0.5951, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 1.024, |
|
"grad_norm": 1.019856000451166, |
|
"learning_rate": 3.866839912153168e-05, |
|
"loss": 0.5657, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 1.0368, |
|
"grad_norm": 1.1582963989458797, |
|
"learning_rate": 3.860342666498677e-05, |
|
"loss": 0.5762, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 1.0496, |
|
"grad_norm": 0.7378208821850025, |
|
"learning_rate": 3.853696390206484e-05, |
|
"loss": 0.5495, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 1.0624, |
|
"grad_norm": 0.9629443781107777, |
|
"learning_rate": 3.846901615704734e-05, |
|
"loss": 0.5622, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 1.0752, |
|
"grad_norm": 0.8215898497844466, |
|
"learning_rate": 3.839958887317649e-05, |
|
"loss": 0.5651, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 1.088, |
|
"grad_norm": 0.8062659683557859, |
|
"learning_rate": 3.832868761221926e-05, |
|
"loss": 0.5595, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 1.1008, |
|
"grad_norm": 0.5974630084173416, |
|
"learning_rate": 3.825631805402182e-05, |
|
"loss": 0.5453, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 1.1136, |
|
"grad_norm": 0.6590847969848663, |
|
"learning_rate": 3.818248599605448e-05, |
|
"loss": 0.5507, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 1.1264, |
|
"grad_norm": 0.6924831332236423, |
|
"learning_rate": 3.810719735294731e-05, |
|
"loss": 0.5676, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 1.1392, |
|
"grad_norm": 0.72991228553215, |
|
"learning_rate": 3.8030458156016326e-05, |
|
"loss": 0.5577, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 1.152, |
|
"grad_norm": 0.7421947536546772, |
|
"learning_rate": 3.795227455278029e-05, |
|
"loss": 0.5523, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 1.1648, |
|
"grad_norm": 0.5816156716395124, |
|
"learning_rate": 3.787265280646825e-05, |
|
"loss": 0.5529, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 1.1776, |
|
"grad_norm": 0.8598648942477825, |
|
"learning_rate": 3.7791599295517825e-05, |
|
"loss": 0.5513, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 1.1904, |
|
"grad_norm": 0.7438234185802748, |
|
"learning_rate": 3.7709120513064196e-05, |
|
"loss": 0.5612, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 1.2032, |
|
"grad_norm": 0.6483403429863566, |
|
"learning_rate": 3.762522306641998e-05, |
|
"loss": 0.5603, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 1.216, |
|
"grad_norm": 0.7542774522709634, |
|
"learning_rate": 3.7539913676545874e-05, |
|
"loss": 0.5619, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 1.2288000000000001, |
|
"grad_norm": 0.7461751255762334, |
|
"learning_rate": 3.745319917751229e-05, |
|
"loss": 0.5532, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 1.2416, |
|
"grad_norm": 0.9624590582142856, |
|
"learning_rate": 3.736508651595188e-05, |
|
"loss": 0.5351, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 1.2544, |
|
"grad_norm": 0.5633453029603849, |
|
"learning_rate": 3.727558275050301e-05, |
|
"loss": 0.5636, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 1.2671999999999999, |
|
"grad_norm": 0.7716354751517465, |
|
"learning_rate": 3.718469505124434e-05, |
|
"loss": 0.5589, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.7282392649542447, |
|
"learning_rate": 3.709243069912041e-05, |
|
"loss": 0.5562, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 1.2928, |
|
"grad_norm": 0.6468222150865308, |
|
"learning_rate": 3.699879708535838e-05, |
|
"loss": 0.5604, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 1.3056, |
|
"grad_norm": 0.7663780069012006, |
|
"learning_rate": 3.69038017108759e-05, |
|
"loss": 0.5444, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 1.3184, |
|
"grad_norm": 0.6728408269448793, |
|
"learning_rate": 3.680745218568026e-05, |
|
"loss": 0.5646, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 1.3312, |
|
"grad_norm": 0.7107729370436198, |
|
"learning_rate": 3.6709756228258735e-05, |
|
"loss": 0.5445, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 1.3439999999999999, |
|
"grad_norm": 0.6501208118140137, |
|
"learning_rate": 3.6610721664960236e-05, |
|
"loss": 0.5598, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 1.3568, |
|
"grad_norm": 0.6825664278161179, |
|
"learning_rate": 3.65103564293684e-05, |
|
"loss": 0.5642, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 1.3696, |
|
"grad_norm": 0.6763795683820295, |
|
"learning_rate": 3.640866856166601e-05, |
|
"loss": 0.5584, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 1.3824, |
|
"grad_norm": 0.6040370208325733, |
|
"learning_rate": 3.6305666207990886e-05, |
|
"loss": 0.5523, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 1.3952, |
|
"grad_norm": 0.7594631571840751, |
|
"learning_rate": 3.6201357619783336e-05, |
|
"loss": 0.5391, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 1.408, |
|
"grad_norm": 0.7008429919676409, |
|
"learning_rate": 3.609575115312511e-05, |
|
"loss": 0.5479, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 1.4208, |
|
"grad_norm": 0.5904255842137021, |
|
"learning_rate": 3.598885526807003e-05, |
|
"loss": 0.5527, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 1.4336, |
|
"grad_norm": 0.6422304747709738, |
|
"learning_rate": 3.5880678527966224e-05, |
|
"loss": 0.5403, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 1.4464000000000001, |
|
"grad_norm": 0.6162161895097944, |
|
"learning_rate": 3.577122959877017e-05, |
|
"loss": 0.5444, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 1.4592, |
|
"grad_norm": 0.5696385308300248, |
|
"learning_rate": 3.566051724835245e-05, |
|
"loss": 0.5443, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 1.472, |
|
"grad_norm": 0.5745052658890734, |
|
"learning_rate": 3.554855034579532e-05, |
|
"loss": 0.5556, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 1.4848, |
|
"grad_norm": 0.4974602909361061, |
|
"learning_rate": 3.5435337860682304e-05, |
|
"loss": 0.5471, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 1.4976, |
|
"grad_norm": 0.5577532605223752, |
|
"learning_rate": 3.532088886237956e-05, |
|
"loss": 0.5558, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 1.5104, |
|
"grad_norm": 0.5174391252499035, |
|
"learning_rate": 3.520521251930941e-05, |
|
"loss": 0.5421, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 1.5232, |
|
"grad_norm": 0.5512260293603698, |
|
"learning_rate": 3.5088318098215805e-05, |
|
"loss": 0.5448, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 1.536, |
|
"grad_norm": 0.5605182595480298, |
|
"learning_rate": 3.497021496342203e-05, |
|
"loss": 0.5602, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 1.5488, |
|
"grad_norm": 0.6193684260808888, |
|
"learning_rate": 3.485091257608047e-05, |
|
"loss": 0.5527, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 1.5615999999999999, |
|
"grad_norm": 0.5553229356906002, |
|
"learning_rate": 3.473042049341474e-05, |
|
"loss": 0.5683, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 1.5744, |
|
"grad_norm": 0.5850872989870655, |
|
"learning_rate": 3.4608748367954064e-05, |
|
"loss": 0.5488, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 1.5872000000000002, |
|
"grad_norm": 0.4813388614506171, |
|
"learning_rate": 3.4485905946759965e-05, |
|
"loss": 0.5494, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 0.6002838304443686, |
|
"learning_rate": 3.4361903070645484e-05, |
|
"loss": 0.5372, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 1.6128, |
|
"grad_norm": 0.5977081380361673, |
|
"learning_rate": 3.423674967338681e-05, |
|
"loss": 0.5488, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 1.6256, |
|
"grad_norm": 0.6541333572193411, |
|
"learning_rate": 3.411045578092754e-05, |
|
"loss": 0.5444, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 1.6383999999999999, |
|
"grad_norm": 0.5951206528492121, |
|
"learning_rate": 3.398303151057543e-05, |
|
"loss": 0.5668, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 1.6512, |
|
"grad_norm": 0.6221414440418981, |
|
"learning_rate": 3.385448707019199e-05, |
|
"loss": 0.559, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 1.6640000000000001, |
|
"grad_norm": 0.5934743951126129, |
|
"learning_rate": 3.372483275737468e-05, |
|
"loss": 0.5304, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.6768, |
|
"grad_norm": 0.7838559999591367, |
|
"learning_rate": 3.359407895863199e-05, |
|
"loss": 0.5468, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 1.6896, |
|
"grad_norm": 0.5268273239649981, |
|
"learning_rate": 3.34622361485514e-05, |
|
"loss": 0.5459, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 1.7024, |
|
"grad_norm": 0.6092008468432318, |
|
"learning_rate": 3.332931488896029e-05, |
|
"loss": 0.5553, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 1.7151999999999998, |
|
"grad_norm": 0.497438975374191, |
|
"learning_rate": 3.319532582807977e-05, |
|
"loss": 0.5405, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 1.728, |
|
"grad_norm": 0.5740690973359671, |
|
"learning_rate": 3.30602796996717e-05, |
|
"loss": 0.5464, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 1.7408000000000001, |
|
"grad_norm": 0.5221731863629598, |
|
"learning_rate": 3.2924187322178865e-05, |
|
"loss": 0.5404, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 1.7536, |
|
"grad_norm": 0.5387878340861486, |
|
"learning_rate": 3.278705959785821e-05, |
|
"loss": 0.5535, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 1.7664, |
|
"grad_norm": 0.5347687783365402, |
|
"learning_rate": 3.2648907511907544e-05, |
|
"loss": 0.5495, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 1.7792, |
|
"grad_norm": 0.6045959368465982, |
|
"learning_rate": 3.250974213158555e-05, |
|
"loss": 0.5467, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 1.792, |
|
"grad_norm": 0.4467725422798922, |
|
"learning_rate": 3.23695746053251e-05, |
|
"loss": 0.5553, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.8048, |
|
"grad_norm": 0.6663767836670907, |
|
"learning_rate": 3.222841616184025e-05, |
|
"loss": 0.5539, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 1.8176, |
|
"grad_norm": 0.4552317360738846, |
|
"learning_rate": 3.208627810922665e-05, |
|
"loss": 0.5469, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 1.8304, |
|
"grad_norm": 0.6230492875626062, |
|
"learning_rate": 3.194317183405573e-05, |
|
"loss": 0.5452, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 1.8432, |
|
"grad_norm": 0.4761975761713638, |
|
"learning_rate": 3.1799108800462466e-05, |
|
"loss": 0.5491, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 1.8559999999999999, |
|
"grad_norm": 0.47978873745405004, |
|
"learning_rate": 3.1654100549227024e-05, |
|
"loss": 0.549, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 1.8688, |
|
"grad_norm": 0.4432295081859974, |
|
"learning_rate": 3.1508158696850275e-05, |
|
"loss": 0.5347, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 1.8816000000000002, |
|
"grad_norm": 0.5003305750000421, |
|
"learning_rate": 3.136129493462312e-05, |
|
"loss": 0.5462, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 1.8944, |
|
"grad_norm": 0.5509077577746613, |
|
"learning_rate": 3.121352102768998e-05, |
|
"loss": 0.5547, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.9072, |
|
"grad_norm": 0.48536813097970655, |
|
"learning_rate": 3.106484881410628e-05, |
|
"loss": 0.5621, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 0.6461268166103642, |
|
"learning_rate": 3.091529020389009e-05, |
|
"loss": 0.5461, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.9327999999999999, |
|
"grad_norm": 0.5471698672388323, |
|
"learning_rate": 3.076485717806808e-05, |
|
"loss": 0.5545, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 1.9456, |
|
"grad_norm": 0.5535315618487886, |
|
"learning_rate": 3.061356178771564e-05, |
|
"loss": 0.5473, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.9584000000000001, |
|
"grad_norm": 0.6282730017895743, |
|
"learning_rate": 3.0461416152991555e-05, |
|
"loss": 0.5582, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 1.9712, |
|
"grad_norm": 0.5067594555727714, |
|
"learning_rate": 3.0308432462167045e-05, |
|
"loss": 0.5674, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 1.984, |
|
"grad_norm": 0.7060287576955648, |
|
"learning_rate": 3.015462297064936e-05, |
|
"loss": 0.5556, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 1.9968, |
|
"grad_norm": 0.5237861552370068, |
|
"learning_rate": 3.0000000000000004e-05, |
|
"loss": 0.5375, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 2.0096, |
|
"grad_norm": 0.6678202622563446, |
|
"learning_rate": 2.98445759369477e-05, |
|
"loss": 0.4848, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 2.0224, |
|
"grad_norm": 0.6669750923521043, |
|
"learning_rate": 2.9688363232396056e-05, |
|
"loss": 0.4739, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 2.0352, |
|
"grad_norm": 0.6399431732450359, |
|
"learning_rate": 2.9531374400426158e-05, |
|
"loss": 0.46, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 2.048, |
|
"grad_norm": 0.6751011332197655, |
|
"learning_rate": 2.9373622017294075e-05, |
|
"loss": 0.4612, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 2.0608, |
|
"grad_norm": 0.5650723452771215, |
|
"learning_rate": 2.9215118720423375e-05, |
|
"loss": 0.4394, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 2.0736, |
|
"grad_norm": 0.586041262485064, |
|
"learning_rate": 2.9055877207392752e-05, |
|
"loss": 0.4426, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 2.0864, |
|
"grad_norm": 0.6366589171610696, |
|
"learning_rate": 2.8895910234918828e-05, |
|
"loss": 0.4408, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 2.0992, |
|
"grad_norm": 0.6201744872862791, |
|
"learning_rate": 2.873523061783426e-05, |
|
"loss": 0.4456, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 2.112, |
|
"grad_norm": 0.5829184961363081, |
|
"learning_rate": 2.8573851228061084e-05, |
|
"loss": 0.467, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 2.1248, |
|
"grad_norm": 0.5952560690768975, |
|
"learning_rate": 2.8411784993579633e-05, |
|
"loss": 0.4319, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 2.1376, |
|
"grad_norm": 0.6156397228610098, |
|
"learning_rate": 2.8249044897392814e-05, |
|
"loss": 0.468, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 2.1504, |
|
"grad_norm": 0.5592355867355706, |
|
"learning_rate": 2.80856439764861e-05, |
|
"loss": 0.448, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 2.1632, |
|
"grad_norm": 0.5627648775048841, |
|
"learning_rate": 2.792159532078314e-05, |
|
"loss": 0.4185, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 2.176, |
|
"grad_norm": 0.5237278883591286, |
|
"learning_rate": 2.77569120720971e-05, |
|
"loss": 0.444, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 2.1888, |
|
"grad_norm": 0.5658592816380061, |
|
"learning_rate": 2.7591607423077932e-05, |
|
"loss": 0.4459, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 2.2016, |
|
"grad_norm": 0.5682196983793549, |
|
"learning_rate": 2.7425694616155474e-05, |
|
"loss": 0.4566, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 2.2144, |
|
"grad_norm": 0.5102529062277341, |
|
"learning_rate": 2.7259186942478656e-05, |
|
"loss": 0.4574, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 2.2272, |
|
"grad_norm": 0.5013276527024729, |
|
"learning_rate": 2.7092097740850712e-05, |
|
"loss": 0.4727, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 0.5133783163473737, |
|
"learning_rate": 2.692444039666066e-05, |
|
"loss": 0.4313, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 2.2528, |
|
"grad_norm": 0.4695179972729291, |
|
"learning_rate": 2.6756228340810946e-05, |
|
"loss": 0.4692, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 2.2656, |
|
"grad_norm": 0.4886224691047343, |
|
"learning_rate": 2.6587475048641596e-05, |
|
"loss": 0.4704, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 2.2784, |
|
"grad_norm": 0.496802153085282, |
|
"learning_rate": 2.6418194038850634e-05, |
|
"loss": 0.4503, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 2.2912, |
|
"grad_norm": 0.5154406753706321, |
|
"learning_rate": 2.624839887241115e-05, |
|
"loss": 0.466, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 2.304, |
|
"grad_norm": 0.5608451939426503, |
|
"learning_rate": 2.607810315148494e-05, |
|
"loss": 0.4587, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 2.3168, |
|
"grad_norm": 0.45659520332211434, |
|
"learning_rate": 2.5907320518332827e-05, |
|
"loss": 0.4564, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 2.3296, |
|
"grad_norm": 0.5227311360549532, |
|
"learning_rate": 2.5736064654221808e-05, |
|
"loss": 0.4461, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 2.3424, |
|
"grad_norm": 0.4387756309191944, |
|
"learning_rate": 2.5564349278329056e-05, |
|
"loss": 0.4479, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 2.3552, |
|
"grad_norm": 0.5367015463063387, |
|
"learning_rate": 2.539218814664288e-05, |
|
"loss": 0.4704, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 2.368, |
|
"grad_norm": 0.47577751129327395, |
|
"learning_rate": 2.521959505086075e-05, |
|
"loss": 0.4644, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 2.3808, |
|
"grad_norm": 0.5384805497260282, |
|
"learning_rate": 2.5046583817284437e-05, |
|
"loss": 0.4353, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 2.3936, |
|
"grad_norm": 0.5171349602083627, |
|
"learning_rate": 2.487316830571244e-05, |
|
"loss": 0.4636, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 2.4064, |
|
"grad_norm": 0.46372877012752045, |
|
"learning_rate": 2.4699362408329646e-05, |
|
"loss": 0.4144, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 2.4192, |
|
"grad_norm": 0.5267695877549445, |
|
"learning_rate": 2.4525180048594452e-05, |
|
"loss": 0.4444, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 2.432, |
|
"grad_norm": 0.5128841134077331, |
|
"learning_rate": 2.435063518012335e-05, |
|
"loss": 0.4597, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 2.4448, |
|
"grad_norm": 0.5101613385632291, |
|
"learning_rate": 2.4175741785573177e-05, |
|
"loss": 0.437, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 2.4576000000000002, |
|
"grad_norm": 0.4980424808938728, |
|
"learning_rate": 2.4000513875520892e-05, |
|
"loss": 0.4524, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 2.4704, |
|
"grad_norm": 0.5019146512115813, |
|
"learning_rate": 2.3824965487341247e-05, |
|
"loss": 0.449, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 2.4832, |
|
"grad_norm": 0.4725650152106896, |
|
"learning_rate": 2.3649110684082258e-05, |
|
"loss": 0.4246, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 2.496, |
|
"grad_norm": 0.4522344461295345, |
|
"learning_rate": 2.3472963553338614e-05, |
|
"loss": 0.4573, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 2.5088, |
|
"grad_norm": 0.4532537198307878, |
|
"learning_rate": 2.3296538206123134e-05, |
|
"loss": 0.4576, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 2.5216, |
|
"grad_norm": 0.38250836302947344, |
|
"learning_rate": 2.311984877573636e-05, |
|
"loss": 0.4478, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 2.5343999999999998, |
|
"grad_norm": 0.4804346749670196, |
|
"learning_rate": 2.2942909416634326e-05, |
|
"loss": 0.4521, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 2.5472, |
|
"grad_norm": 0.42746502313598084, |
|
"learning_rate": 2.2765734303294666e-05, |
|
"loss": 0.4551, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 0.5397281638613642, |
|
"learning_rate": 2.2588337629081107e-05, |
|
"loss": 0.4445, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 2.5728, |
|
"grad_norm": 0.4036003190873001, |
|
"learning_rate": 2.2410733605106462e-05, |
|
"loss": 0.461, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 2.5856, |
|
"grad_norm": 0.5676833483469378, |
|
"learning_rate": 2.2232936459094158e-05, |
|
"loss": 0.4538, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 2.5984, |
|
"grad_norm": 0.4082650136958347, |
|
"learning_rate": 2.205496043423849e-05, |
|
"loss": 0.4532, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 2.6112, |
|
"grad_norm": 0.5941132393449255, |
|
"learning_rate": 2.1876819788063586e-05, |
|
"loss": 0.4378, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 2.624, |
|
"grad_norm": 0.42177809042160447, |
|
"learning_rate": 2.16985287912813e-05, |
|
"loss": 0.4313, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 2.6368, |
|
"grad_norm": 0.5829494893057481, |
|
"learning_rate": 2.1520101726647922e-05, |
|
"loss": 0.459, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 2.6496, |
|
"grad_norm": 0.44368898960586683, |
|
"learning_rate": 2.1341552887820048e-05, |
|
"loss": 0.4723, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 2.6624, |
|
"grad_norm": 0.5281206973339344, |
|
"learning_rate": 2.1162896578209517e-05, |
|
"loss": 0.43, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 2.6752000000000002, |
|
"grad_norm": 0.4434927023159996, |
|
"learning_rate": 2.0984147109837564e-05, |
|
"loss": 0.4435, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 2.6879999999999997, |
|
"grad_norm": 0.39764211620379664, |
|
"learning_rate": 2.0805318802188307e-05, |
|
"loss": 0.4709, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 2.7008, |
|
"grad_norm": 0.43281731792688005, |
|
"learning_rate": 2.0626425981061608e-05, |
|
"loss": 0.4246, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 2.7136, |
|
"grad_norm": 0.4076949243522016, |
|
"learning_rate": 2.0447482977425465e-05, |
|
"loss": 0.4426, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 2.7264, |
|
"grad_norm": 0.40937943071521105, |
|
"learning_rate": 2.0268504126267952e-05, |
|
"loss": 0.4488, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 2.7392, |
|
"grad_norm": 0.39395551988457694, |
|
"learning_rate": 2.008950376544887e-05, |
|
"loss": 0.4333, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 2.752, |
|
"grad_norm": 0.4094675463233994, |
|
"learning_rate": 1.9910496234551132e-05, |
|
"loss": 0.4565, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 2.7648, |
|
"grad_norm": 0.4246909053508187, |
|
"learning_rate": 1.9731495873732055e-05, |
|
"loss": 0.4442, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 2.7776, |
|
"grad_norm": 0.4087712133593184, |
|
"learning_rate": 1.9552517022574542e-05, |
|
"loss": 0.4662, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 2.7904, |
|
"grad_norm": 0.41914350963201125, |
|
"learning_rate": 1.93735740189384e-05, |
|
"loss": 0.4334, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 2.8032, |
|
"grad_norm": 0.3792408388470156, |
|
"learning_rate": 1.9194681197811703e-05, |
|
"loss": 0.4591, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 2.816, |
|
"grad_norm": 0.41023209297203744, |
|
"learning_rate": 1.901585289016244e-05, |
|
"loss": 0.4434, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 2.8288, |
|
"grad_norm": 0.46330559828002704, |
|
"learning_rate": 1.8837103421790486e-05, |
|
"loss": 0.4524, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 2.8416, |
|
"grad_norm": 0.35441939230672964, |
|
"learning_rate": 1.8658447112179952e-05, |
|
"loss": 0.4485, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 2.8544, |
|
"grad_norm": 0.43564932421426406, |
|
"learning_rate": 1.8479898273352084e-05, |
|
"loss": 0.4744, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 2.8672, |
|
"grad_norm": 0.3814117483935704, |
|
"learning_rate": 1.83014712087187e-05, |
|
"loss": 0.4592, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"grad_norm": 0.4177127225983221, |
|
"learning_rate": 1.8123180211936417e-05, |
|
"loss": 0.4538, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 2.8928000000000003, |
|
"grad_norm": 0.42605461927652116, |
|
"learning_rate": 1.794503956576152e-05, |
|
"loss": 0.442, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 2.9055999999999997, |
|
"grad_norm": 0.5298260332878959, |
|
"learning_rate": 1.776706354090585e-05, |
|
"loss": 0.4601, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 2.9184, |
|
"grad_norm": 0.45173150756411196, |
|
"learning_rate": 1.758926639489354e-05, |
|
"loss": 0.4221, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 2.9312, |
|
"grad_norm": 0.500521718892224, |
|
"learning_rate": 1.7411662370918893e-05, |
|
"loss": 0.4453, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 2.944, |
|
"grad_norm": 0.459707412496701, |
|
"learning_rate": 1.7234265696705344e-05, |
|
"loss": 0.4467, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 2.9568, |
|
"grad_norm": 0.41593816766575187, |
|
"learning_rate": 1.7057090583365678e-05, |
|
"loss": 0.4707, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 2.9696, |
|
"grad_norm": 0.46245731898241266, |
|
"learning_rate": 1.6880151224263646e-05, |
|
"loss": 0.4331, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 2.9824, |
|
"grad_norm": 0.4016276573512303, |
|
"learning_rate": 1.6703461793876876e-05, |
|
"loss": 0.4566, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 2.9952, |
|
"grad_norm": 0.46362716194562703, |
|
"learning_rate": 1.6527036446661396e-05, |
|
"loss": 0.4301, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 3.008, |
|
"grad_norm": 0.5004097429818956, |
|
"learning_rate": 1.635088931591775e-05, |
|
"loss": 0.3689, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 3.0208, |
|
"grad_norm": 0.45397929253767844, |
|
"learning_rate": 1.6175034512658753e-05, |
|
"loss": 0.3781, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 3.0336, |
|
"grad_norm": 0.7279501653096108, |
|
"learning_rate": 1.5999486124479115e-05, |
|
"loss": 0.3607, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 3.0464, |
|
"grad_norm": 0.5271763244671553, |
|
"learning_rate": 1.5824258214426833e-05, |
|
"loss": 0.3798, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 3.0592, |
|
"grad_norm": 0.6230623123260388, |
|
"learning_rate": 1.5649364819876655e-05, |
|
"loss": 0.3588, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 3.072, |
|
"grad_norm": 0.4691487581943571, |
|
"learning_rate": 1.547481995140556e-05, |
|
"loss": 0.374, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 3.0848, |
|
"grad_norm": 0.5267052496426081, |
|
"learning_rate": 1.5300637591670357e-05, |
|
"loss": 0.3706, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 3.0976, |
|
"grad_norm": 0.5067182048115253, |
|
"learning_rate": 1.5126831694287564e-05, |
|
"loss": 0.3671, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 3.1104, |
|
"grad_norm": 0.421278553921468, |
|
"learning_rate": 1.4953416182715566e-05, |
|
"loss": 0.3621, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 3.1232, |
|
"grad_norm": 0.4343364134376989, |
|
"learning_rate": 1.478040494913926e-05, |
|
"loss": 0.3757, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 3.136, |
|
"grad_norm": 0.4524399849288063, |
|
"learning_rate": 1.460781185335713e-05, |
|
"loss": 0.3695, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 3.1488, |
|
"grad_norm": 0.39338140551630585, |
|
"learning_rate": 1.443565072167095e-05, |
|
"loss": 0.3601, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 3.1616, |
|
"grad_norm": 0.4365587466463919, |
|
"learning_rate": 1.4263935345778202e-05, |
|
"loss": 0.3577, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 3.1744, |
|
"grad_norm": 0.4234946069608991, |
|
"learning_rate": 1.409267948166718e-05, |
|
"loss": 0.3479, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 3.1872, |
|
"grad_norm": 0.40138255715707816, |
|
"learning_rate": 1.3921896848515064e-05, |
|
"loss": 0.3643, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"grad_norm": 0.40135399562351226, |
|
"learning_rate": 1.3751601127588849e-05, |
|
"loss": 0.3787, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 3.2128, |
|
"grad_norm": 0.3790300509466168, |
|
"learning_rate": 1.3581805961149371e-05, |
|
"loss": 0.3707, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 3.2256, |
|
"grad_norm": 0.41758458935097315, |
|
"learning_rate": 1.341252495135841e-05, |
|
"loss": 0.3905, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 3.2384, |
|
"grad_norm": 0.35407080309322747, |
|
"learning_rate": 1.324377165918906e-05, |
|
"loss": 0.369, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 3.2512, |
|
"grad_norm": 0.40656317353230165, |
|
"learning_rate": 1.3075559603339354e-05, |
|
"loss": 0.3549, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 3.2640000000000002, |
|
"grad_norm": 0.420463928822835, |
|
"learning_rate": 1.2907902259149287e-05, |
|
"loss": 0.3326, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 3.2768, |
|
"grad_norm": 0.35160892607322725, |
|
"learning_rate": 1.274081305752135e-05, |
|
"loss": 0.396, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 3.2896, |
|
"grad_norm": 0.423290535153289, |
|
"learning_rate": 1.2574305383844528e-05, |
|
"loss": 0.3702, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 3.3024, |
|
"grad_norm": 0.3323483152890749, |
|
"learning_rate": 1.2408392576922075e-05, |
|
"loss": 0.3733, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 3.3152, |
|
"grad_norm": 0.382062003788085, |
|
"learning_rate": 1.2243087927902905e-05, |
|
"loss": 0.3652, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 3.328, |
|
"grad_norm": 0.3527822057086127, |
|
"learning_rate": 1.2078404679216864e-05, |
|
"loss": 0.3505, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 3.3407999999999998, |
|
"grad_norm": 0.31132924993032884, |
|
"learning_rate": 1.1914356023513904e-05, |
|
"loss": 0.3574, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 3.3536, |
|
"grad_norm": 0.38023126218354447, |
|
"learning_rate": 1.1750955102607193e-05, |
|
"loss": 0.3845, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 3.3664, |
|
"grad_norm": 0.3760193075816827, |
|
"learning_rate": 1.1588215006420374e-05, |
|
"loss": 0.3913, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 3.3792, |
|
"grad_norm": 0.3584397701304726, |
|
"learning_rate": 1.1426148771938915e-05, |
|
"loss": 0.3488, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 3.392, |
|
"grad_norm": 0.3730714526513745, |
|
"learning_rate": 1.1264769382165748e-05, |
|
"loss": 0.3543, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 3.4048, |
|
"grad_norm": 0.3579123444897427, |
|
"learning_rate": 1.110408976508118e-05, |
|
"loss": 0.3525, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 3.4176, |
|
"grad_norm": 0.37105707673062116, |
|
"learning_rate": 1.094412279260726e-05, |
|
"loss": 0.3747, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 3.4304, |
|
"grad_norm": 0.3293600206042143, |
|
"learning_rate": 1.0784881279576635e-05, |
|
"loss": 0.397, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 3.4432, |
|
"grad_norm": 0.3390868639291035, |
|
"learning_rate": 1.0626377982705929e-05, |
|
"loss": 0.353, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 3.456, |
|
"grad_norm": 0.3280116889287868, |
|
"learning_rate": 1.0468625599573842e-05, |
|
"loss": 0.3698, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 3.4688, |
|
"grad_norm": 0.3577592658215991, |
|
"learning_rate": 1.0311636767603952e-05, |
|
"loss": 0.3429, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 3.4816, |
|
"grad_norm": 0.34859372800510313, |
|
"learning_rate": 1.0155424063052306e-05, |
|
"loss": 0.3464, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 3.4944, |
|
"grad_norm": 0.31023285833474384, |
|
"learning_rate": 1.0000000000000006e-05, |
|
"loss": 0.3851, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 3.5072, |
|
"grad_norm": 0.3364000022645315, |
|
"learning_rate": 9.84537702935065e-06, |
|
"loss": 0.356, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"grad_norm": 0.3321914029103981, |
|
"learning_rate": 9.691567537832964e-06, |
|
"loss": 0.3605, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 3.5328, |
|
"grad_norm": 0.3258934278472488, |
|
"learning_rate": 9.538583847008452e-06, |
|
"loss": 0.3916, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 3.5456, |
|
"grad_norm": 0.3584353252301879, |
|
"learning_rate": 9.386438212284372e-06, |
|
"loss": 0.3623, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 3.5584, |
|
"grad_norm": 0.31423092055549057, |
|
"learning_rate": 9.235142821931928e-06, |
|
"loss": 0.3722, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 3.5712, |
|
"grad_norm": 0.3689395382305111, |
|
"learning_rate": 9.084709796109907e-06, |
|
"loss": 0.3616, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 3.584, |
|
"grad_norm": 0.3533964902772209, |
|
"learning_rate": 8.93515118589373e-06, |
|
"loss": 0.3708, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 3.5968, |
|
"grad_norm": 0.3346117111839511, |
|
"learning_rate": 8.786478972310023e-06, |
|
"loss": 0.3932, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 3.6096, |
|
"grad_norm": 0.31607262685467563, |
|
"learning_rate": 8.638705065376887e-06, |
|
"loss": 0.3931, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 3.6224, |
|
"grad_norm": 0.3382646333246466, |
|
"learning_rate": 8.491841303149728e-06, |
|
"loss": 0.3719, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 3.6352, |
|
"grad_norm": 0.3262881762850147, |
|
"learning_rate": 8.345899450772975e-06, |
|
"loss": 0.3565, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 3.648, |
|
"grad_norm": 0.31639797972938905, |
|
"learning_rate": 8.200891199537549e-06, |
|
"loss": 0.3621, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 3.6608, |
|
"grad_norm": 0.3240547796655519, |
|
"learning_rate": 8.056828165944282e-06, |
|
"loss": 0.3632, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 3.6736, |
|
"grad_norm": 0.34275181674679317, |
|
"learning_rate": 7.913721890773354e-06, |
|
"loss": 0.3849, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 3.6864, |
|
"grad_norm": 0.3453184261266086, |
|
"learning_rate": 7.771583838159756e-06, |
|
"loss": 0.3758, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 3.6992000000000003, |
|
"grad_norm": 0.34071235830320895, |
|
"learning_rate": 7.630425394674903e-06, |
|
"loss": 0.3414, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 3.7119999999999997, |
|
"grad_norm": 0.3255810804275077, |
|
"learning_rate": 7.49025786841445e-06, |
|
"loss": 0.3723, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 3.7248, |
|
"grad_norm": 0.3287782006959308, |
|
"learning_rate": 7.3510924880924575e-06, |
|
"loss": 0.378, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 3.7376, |
|
"grad_norm": 0.34162576634422614, |
|
"learning_rate": 7.212940402141808e-06, |
|
"loss": 0.3785, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 3.7504, |
|
"grad_norm": 0.32365384074358655, |
|
"learning_rate": 7.075812677821145e-06, |
|
"loss": 0.354, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 3.7632, |
|
"grad_norm": 0.29539672261333944, |
|
"learning_rate": 6.939720300328303e-06, |
|
"loss": 0.371, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 3.776, |
|
"grad_norm": 0.3024603487823895, |
|
"learning_rate": 6.8046741719202385e-06, |
|
"loss": 0.3501, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 3.7888, |
|
"grad_norm": 0.31182858580612854, |
|
"learning_rate": 6.67068511103971e-06, |
|
"loss": 0.3709, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 3.8016, |
|
"grad_norm": 0.35091428277028275, |
|
"learning_rate": 6.537763851448593e-06, |
|
"loss": 0.3913, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 3.8144, |
|
"grad_norm": 0.3319035157120333, |
|
"learning_rate": 6.4059210413680175e-06, |
|
"loss": 0.362, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 3.8272, |
|
"grad_norm": 0.29656496926230397, |
|
"learning_rate": 6.275167242625331e-06, |
|
"loss": 0.3658, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"grad_norm": 0.2998731990744445, |
|
"learning_rate": 6.145512929808013e-06, |
|
"loss": 0.3909, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 3.8528000000000002, |
|
"grad_norm": 0.3312793264864784, |
|
"learning_rate": 6.016968489424572e-06, |
|
"loss": 0.3556, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 3.8656, |
|
"grad_norm": 0.30593350276281084, |
|
"learning_rate": 5.889544219072465e-06, |
|
"loss": 0.3557, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 3.8784, |
|
"grad_norm": 0.3194680377559201, |
|
"learning_rate": 5.7632503266131925e-06, |
|
"loss": 0.3356, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 3.8912, |
|
"grad_norm": 0.28711000905224604, |
|
"learning_rate": 5.638096929354522e-06, |
|
"loss": 0.3617, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 3.904, |
|
"grad_norm": 0.2836580888304628, |
|
"learning_rate": 5.514094053240035e-06, |
|
"loss": 0.3621, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 3.9168, |
|
"grad_norm": 0.29883718617423993, |
|
"learning_rate": 5.39125163204594e-06, |
|
"loss": 0.3673, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 3.9295999999999998, |
|
"grad_norm": 0.31604534065967155, |
|
"learning_rate": 5.269579506585259e-06, |
|
"loss": 0.3575, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 3.9424, |
|
"grad_norm": 0.3029471310685604, |
|
"learning_rate": 5.149087423919541e-06, |
|
"loss": 0.3903, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 3.9552, |
|
"grad_norm": 0.2966706945691347, |
|
"learning_rate": 5.029785036577976e-06, |
|
"loss": 0.3574, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 3.968, |
|
"grad_norm": 0.2764131067907005, |
|
"learning_rate": 4.911681901784198e-06, |
|
"loss": 0.3849, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 3.9808, |
|
"grad_norm": 0.31223514877690434, |
|
"learning_rate": 4.794787480690597e-06, |
|
"loss": 0.3744, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 3.9936, |
|
"grad_norm": 0.29567041539338607, |
|
"learning_rate": 4.679111137620442e-06, |
|
"loss": 0.3415, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 4.0064, |
|
"grad_norm": 0.3504411331683098, |
|
"learning_rate": 4.5646621393177e-06, |
|
"loss": 0.3231, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 4.0192, |
|
"grad_norm": 0.3931568680835711, |
|
"learning_rate": 4.451449654204685e-06, |
|
"loss": 0.3025, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 4.032, |
|
"grad_norm": 0.28936012811319806, |
|
"learning_rate": 4.339482751647557e-06, |
|
"loss": 0.3507, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 4.0448, |
|
"grad_norm": 0.32407742733998635, |
|
"learning_rate": 4.228770401229824e-06, |
|
"loss": 0.3099, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 4.0576, |
|
"grad_norm": 0.4680933320551794, |
|
"learning_rate": 4.119321472033779e-06, |
|
"loss": 0.317, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 4.0704, |
|
"grad_norm": 0.4813797502731172, |
|
"learning_rate": 4.011144731929981e-06, |
|
"loss": 0.353, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 4.0832, |
|
"grad_norm": 0.3068835831880434, |
|
"learning_rate": 3.904248846874894e-06, |
|
"loss": 0.293, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 4.096, |
|
"grad_norm": 0.351051620730367, |
|
"learning_rate": 3.7986423802166705e-06, |
|
"loss": 0.2994, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 4.1088, |
|
"grad_norm": 0.37657202784876687, |
|
"learning_rate": 3.694333792009115e-06, |
|
"loss": 0.288, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 4.1216, |
|
"grad_norm": 0.340429910116198, |
|
"learning_rate": 3.5913314383339937e-06, |
|
"loss": 0.2945, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 4.1344, |
|
"grad_norm": 0.27336555580482963, |
|
"learning_rate": 3.4896435706316e-06, |
|
"loss": 0.3367, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 4.1472, |
|
"grad_norm": 0.2882006523229666, |
|
"learning_rate": 3.3892783350397675e-06, |
|
"loss": 0.3352, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"grad_norm": 0.34620306890696, |
|
"learning_rate": 3.290243771741275e-06, |
|
"loss": 0.3337, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 4.1728, |
|
"grad_norm": 0.32192969905720603, |
|
"learning_rate": 3.1925478143197418e-06, |
|
"loss": 0.2992, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 4.1856, |
|
"grad_norm": 0.2884580223975795, |
|
"learning_rate": 3.0961982891241083e-06, |
|
"loss": 0.3486, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 4.1984, |
|
"grad_norm": 0.27459186006874053, |
|
"learning_rate": 3.001202914641628e-06, |
|
"loss": 0.3206, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 4.2112, |
|
"grad_norm": 0.2870978017024338, |
|
"learning_rate": 2.907569300879596e-06, |
|
"loss": 0.3264, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 4.224, |
|
"grad_norm": 0.3061825714793093, |
|
"learning_rate": 2.815304948755664e-06, |
|
"loss": 0.3021, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 4.2368, |
|
"grad_norm": 0.28007327004321064, |
|
"learning_rate": 2.7244172494969978e-06, |
|
"loss": 0.2989, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 4.2496, |
|
"grad_norm": 0.3073486540360964, |
|
"learning_rate": 2.6349134840481294e-06, |
|
"loss": 0.2884, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 4.2624, |
|
"grad_norm": 0.26515751186571046, |
|
"learning_rate": 2.546800822487714e-06, |
|
"loss": 0.328, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 4.2752, |
|
"grad_norm": 0.26092068470132884, |
|
"learning_rate": 2.4600863234541338e-06, |
|
"loss": 0.3155, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 4.288, |
|
"grad_norm": 0.2748074640925594, |
|
"learning_rate": 2.374776933580025e-06, |
|
"loss": 0.3059, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 4.3008, |
|
"grad_norm": 0.2860285883258981, |
|
"learning_rate": 2.2908794869358044e-06, |
|
"loss": 0.3278, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 4.3136, |
|
"grad_norm": 0.27360963003754485, |
|
"learning_rate": 2.2084007044821764e-06, |
|
"loss": 0.3202, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 4.3264, |
|
"grad_norm": 0.2625289125272169, |
|
"learning_rate": 2.127347193531757e-06, |
|
"loss": 0.3045, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 4.3392, |
|
"grad_norm": 0.2677590682338024, |
|
"learning_rate": 2.0477254472197237e-06, |
|
"loss": 0.3413, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 4.352, |
|
"grad_norm": 0.2567974519755871, |
|
"learning_rate": 1.96954184398368e-06, |
|
"loss": 0.3571, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 4.3648, |
|
"grad_norm": 0.25401556042640333, |
|
"learning_rate": 1.8928026470526917e-06, |
|
"loss": 0.3319, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 4.3776, |
|
"grad_norm": 0.24749745478560123, |
|
"learning_rate": 1.817514003945524e-06, |
|
"loss": 0.3153, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 4.3904, |
|
"grad_norm": 0.26422956481216253, |
|
"learning_rate": 1.743681945978184e-06, |
|
"loss": 0.3258, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 4.4032, |
|
"grad_norm": 0.24372985394844288, |
|
"learning_rate": 1.6713123877807413e-06, |
|
"loss": 0.3031, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 4.416, |
|
"grad_norm": 0.2581077508191922, |
|
"learning_rate": 1.6004111268235156e-06, |
|
"loss": 0.3085, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 4.4288, |
|
"grad_norm": 0.25038284641790104, |
|
"learning_rate": 1.5309838429526714e-06, |
|
"loss": 0.3295, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 4.4416, |
|
"grad_norm": 0.28978185708152177, |
|
"learning_rate": 1.4630360979351644e-06, |
|
"loss": 0.2994, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 4.4544, |
|
"grad_norm": 0.24794574801242528, |
|
"learning_rate": 1.396573335013236e-06, |
|
"loss": 0.3105, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 4.4672, |
|
"grad_norm": 0.25624266514753086, |
|
"learning_rate": 1.3316008784683265e-06, |
|
"loss": 0.3173, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"grad_norm": 0.24324806761672718, |
|
"learning_rate": 1.2681239331945695e-06, |
|
"loss": 0.3235, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 4.4928, |
|
"grad_norm": 0.25149631251423993, |
|
"learning_rate": 1.2061475842818337e-06, |
|
"loss": 0.3042, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 4.5056, |
|
"grad_norm": 0.23759462417106564, |
|
"learning_rate": 1.1456767966083393e-06, |
|
"loss": 0.3204, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 4.5184, |
|
"grad_norm": 0.2500928218978635, |
|
"learning_rate": 1.086716414442952e-06, |
|
"loss": 0.3411, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 4.5312, |
|
"grad_norm": 0.24291903474326795, |
|
"learning_rate": 1.0292711610570904e-06, |
|
"loss": 0.3319, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 4.5440000000000005, |
|
"grad_norm": 0.23988922674456525, |
|
"learning_rate": 9.733456383463658e-07, |
|
"loss": 0.3234, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 4.5568, |
|
"grad_norm": 0.24176744472261644, |
|
"learning_rate": 9.189443264619102e-07, |
|
"loss": 0.3401, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 4.5696, |
|
"grad_norm": 0.2476012377698525, |
|
"learning_rate": 8.660715834514977e-07, |
|
"loss": 0.3088, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 4.5824, |
|
"grad_norm": 0.24029504916945307, |
|
"learning_rate": 8.147316449103959e-07, |
|
"loss": 0.3153, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 4.5952, |
|
"grad_norm": 0.23741894121230409, |
|
"learning_rate": 7.649286236420806e-07, |
|
"loss": 0.3216, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 4.608, |
|
"grad_norm": 0.24800566532952512, |
|
"learning_rate": 7.166665093287539e-07, |
|
"loss": 0.3093, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 4.6208, |
|
"grad_norm": 0.243947415013688, |
|
"learning_rate": 6.69949168211721e-07, |
|
"loss": 0.3091, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 4.6336, |
|
"grad_norm": 0.2525272399066849, |
|
"learning_rate": 6.247803427816945e-07, |
|
"loss": 0.2905, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 4.6464, |
|
"grad_norm": 0.2534656791817323, |
|
"learning_rate": 5.811636514789598e-07, |
|
"loss": 0.2995, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 4.6592, |
|
"grad_norm": 0.2430105888149135, |
|
"learning_rate": 5.391025884035239e-07, |
|
"loss": 0.2783, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 4.672, |
|
"grad_norm": 0.23569535625632956, |
|
"learning_rate": 4.986005230351954e-07, |
|
"loss": 0.3508, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 4.6848, |
|
"grad_norm": 0.2540291353288265, |
|
"learning_rate": 4.5966069996365993e-07, |
|
"loss": 0.3058, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 4.6975999999999996, |
|
"grad_norm": 0.2380598216424529, |
|
"learning_rate": 4.22286238628562e-07, |
|
"loss": 0.3228, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 4.7104, |
|
"grad_norm": 0.24963221995398183, |
|
"learning_rate": 3.8648013306960664e-07, |
|
"loss": 0.3045, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 4.7232, |
|
"grad_norm": 0.24346615558606977, |
|
"learning_rate": 3.522452516867048e-07, |
|
"loss": 0.308, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 4.736, |
|
"grad_norm": 0.23401178265699293, |
|
"learning_rate": 3.1958433701019697e-07, |
|
"loss": 0.3214, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 4.7488, |
|
"grad_norm": 0.25175625041590033, |
|
"learning_rate": 2.8850000548115155e-07, |
|
"loss": 0.3227, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 4.7616, |
|
"grad_norm": 0.2391601883627694, |
|
"learning_rate": 2.5899474724174313e-07, |
|
"loss": 0.3052, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 4.7744, |
|
"grad_norm": 0.23348992719137646, |
|
"learning_rate": 2.3107092593579905e-07, |
|
"loss": 0.3402, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 4.7872, |
|
"grad_norm": 0.23720217424937548, |
|
"learning_rate": 2.0473077851942858e-07, |
|
"loss": 0.325, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"grad_norm": 0.2445933538130364, |
|
"learning_rate": 1.799764150818306e-07, |
|
"loss": 0.3366, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 4.8128, |
|
"grad_norm": 0.23489019631785116, |
|
"learning_rate": 1.5680981867625566e-07, |
|
"loss": 0.3392, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 4.8256, |
|
"grad_norm": 0.2374303126311836, |
|
"learning_rate": 1.3523284516113955e-07, |
|
"loss": 0.2974, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 4.8384, |
|
"grad_norm": 0.2365588987551183, |
|
"learning_rate": 1.1524722305144231e-07, |
|
"loss": 0.3141, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 4.8512, |
|
"grad_norm": 0.23431121077879877, |
|
"learning_rate": 9.685455338016347e-08, |
|
"loss": 0.3358, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 4.864, |
|
"grad_norm": 0.23021249846944067, |
|
"learning_rate": 8.005630957010014e-08, |
|
"loss": 0.3114, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 4.8768, |
|
"grad_norm": 0.2455488381419304, |
|
"learning_rate": 6.485383731580142e-08, |
|
"loss": 0.3149, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 4.8896, |
|
"grad_norm": 0.23795742806289083, |
|
"learning_rate": 5.1248354475768034e-08, |
|
"loss": 0.3388, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 4.9024, |
|
"grad_norm": 0.2332652517526661, |
|
"learning_rate": 3.924095097489922e-08, |
|
"loss": 0.3179, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 4.9152000000000005, |
|
"grad_norm": 0.24280066249605625, |
|
"learning_rate": 2.8832588717164766e-08, |
|
"loss": 0.2996, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 4.928, |
|
"grad_norm": 0.2546053075094607, |
|
"learning_rate": 2.0024101508555604e-08, |
|
"loss": 0.295, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 4.9408, |
|
"grad_norm": 0.24792933487577504, |
|
"learning_rate": 1.281619499029274e-08, |
|
"loss": 0.3035, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 4.9536, |
|
"grad_norm": 0.2469568104249538, |
|
"learning_rate": 7.209446582292501e-09, |
|
"loss": 0.2965, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 4.9664, |
|
"grad_norm": 0.22797976337429476, |
|
"learning_rate": 3.2043054369057523e-09, |
|
"loss": 0.34, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 4.9792, |
|
"grad_norm": 0.2429980732264221, |
|
"learning_rate": 8.010924029533406e-10, |
|
"loss": 0.2919, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 4.992, |
|
"grad_norm": 0.2468825131784473, |
|
"learning_rate": 0.0, |
|
"loss": 0.3017, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 4.992, |
|
"step": 390, |
|
"total_flos": 1.349858349918716e+18, |
|
"train_loss": 0.48315360164031, |
|
"train_runtime": 57610.018, |
|
"train_samples_per_second": 0.868, |
|
"train_steps_per_second": 0.007 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 390, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 5, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.349858349918716e+18, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |