|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 2.9952, |
|
"eval_steps": 500, |
|
"global_step": 312, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0096, |
|
"grad_norm": 6.164039611816406, |
|
"learning_rate": 3.125e-07, |
|
"loss": 1.008, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0192, |
|
"grad_norm": 6.292746067047119, |
|
"learning_rate": 6.25e-07, |
|
"loss": 1.0149, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0288, |
|
"grad_norm": 6.367922306060791, |
|
"learning_rate": 9.375000000000001e-07, |
|
"loss": 1.026, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0384, |
|
"grad_norm": 6.2663187980651855, |
|
"learning_rate": 1.25e-06, |
|
"loss": 1.0154, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.048, |
|
"grad_norm": 6.17635440826416, |
|
"learning_rate": 1.5625e-06, |
|
"loss": 1.0091, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0576, |
|
"grad_norm": 5.536340236663818, |
|
"learning_rate": 1.8750000000000003e-06, |
|
"loss": 0.9624, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.0672, |
|
"grad_norm": 4.604569435119629, |
|
"learning_rate": 2.1875000000000002e-06, |
|
"loss": 0.95, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.0768, |
|
"grad_norm": 4.306362152099609, |
|
"learning_rate": 2.5e-06, |
|
"loss": 0.9617, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.0864, |
|
"grad_norm": 2.4510245323181152, |
|
"learning_rate": 2.8125e-06, |
|
"loss": 0.921, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.096, |
|
"grad_norm": 2.3514862060546875, |
|
"learning_rate": 3.125e-06, |
|
"loss": 0.8574, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.1056, |
|
"grad_norm": 2.2417752742767334, |
|
"learning_rate": 3.4375e-06, |
|
"loss": 0.9179, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.1152, |
|
"grad_norm": 3.4170384407043457, |
|
"learning_rate": 3.7500000000000005e-06, |
|
"loss": 0.8742, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.1248, |
|
"grad_norm": 4.054645538330078, |
|
"learning_rate": 4.0625000000000005e-06, |
|
"loss": 0.8919, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.1344, |
|
"grad_norm": 3.9020988941192627, |
|
"learning_rate": 4.3750000000000005e-06, |
|
"loss": 0.8858, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.144, |
|
"grad_norm": 3.5906124114990234, |
|
"learning_rate": 4.6875000000000004e-06, |
|
"loss": 0.8746, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.1536, |
|
"grad_norm": 2.614440679550171, |
|
"learning_rate": 5e-06, |
|
"loss": 0.866, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.1632, |
|
"grad_norm": 2.242814540863037, |
|
"learning_rate": 5.3125e-06, |
|
"loss": 0.8143, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.1728, |
|
"grad_norm": 1.921581745147705, |
|
"learning_rate": 5.625e-06, |
|
"loss": 0.8119, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.1824, |
|
"grad_norm": 1.758897066116333, |
|
"learning_rate": 5.9375e-06, |
|
"loss": 0.8001, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.192, |
|
"grad_norm": 1.4624104499816895, |
|
"learning_rate": 6.25e-06, |
|
"loss": 0.7916, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.2016, |
|
"grad_norm": 1.2528955936431885, |
|
"learning_rate": 6.5625e-06, |
|
"loss": 0.7971, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.2112, |
|
"grad_norm": 1.2485564947128296, |
|
"learning_rate": 6.875e-06, |
|
"loss": 0.7607, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.2208, |
|
"grad_norm": 1.1729601621627808, |
|
"learning_rate": 7.1875e-06, |
|
"loss": 0.7394, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.2304, |
|
"grad_norm": 1.1565651893615723, |
|
"learning_rate": 7.500000000000001e-06, |
|
"loss": 0.7683, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.0047582387924194, |
|
"learning_rate": 7.8125e-06, |
|
"loss": 0.7456, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.2496, |
|
"grad_norm": 1.0549741983413696, |
|
"learning_rate": 8.125000000000001e-06, |
|
"loss": 0.7246, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.2592, |
|
"grad_norm": 1.0340930223464966, |
|
"learning_rate": 8.4375e-06, |
|
"loss": 0.7379, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.2688, |
|
"grad_norm": 0.805397093296051, |
|
"learning_rate": 8.750000000000001e-06, |
|
"loss": 0.7172, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.2784, |
|
"grad_norm": 0.8800445795059204, |
|
"learning_rate": 9.0625e-06, |
|
"loss": 0.7218, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.288, |
|
"grad_norm": 0.9208349585533142, |
|
"learning_rate": 9.375000000000001e-06, |
|
"loss": 0.7242, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.2976, |
|
"grad_norm": 0.7508711814880371, |
|
"learning_rate": 9.6875e-06, |
|
"loss": 0.6823, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.3072, |
|
"grad_norm": 0.8096818327903748, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7239, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.3168, |
|
"grad_norm": 0.7590523362159729, |
|
"learning_rate": 9.999685283773504e-06, |
|
"loss": 0.6874, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.3264, |
|
"grad_norm": 0.7550079226493835, |
|
"learning_rate": 9.998741174712534e-06, |
|
"loss": 0.7055, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.336, |
|
"grad_norm": 0.7203152775764465, |
|
"learning_rate": 9.997167791667668e-06, |
|
"loss": 0.6942, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.3456, |
|
"grad_norm": 0.7001503705978394, |
|
"learning_rate": 9.994965332706574e-06, |
|
"loss": 0.7124, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.3552, |
|
"grad_norm": 0.7050896286964417, |
|
"learning_rate": 9.992134075089085e-06, |
|
"loss": 0.7075, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.3648, |
|
"grad_norm": 0.6661181449890137, |
|
"learning_rate": 9.98867437523228e-06, |
|
"loss": 0.7181, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.3744, |
|
"grad_norm": 0.6402677893638611, |
|
"learning_rate": 9.984586668665641e-06, |
|
"loss": 0.6979, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.384, |
|
"grad_norm": 0.6823920607566833, |
|
"learning_rate": 9.979871469976197e-06, |
|
"loss": 0.6876, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.3936, |
|
"grad_norm": 0.6096172332763672, |
|
"learning_rate": 9.974529372743762e-06, |
|
"loss": 0.6851, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.4032, |
|
"grad_norm": 0.5798601508140564, |
|
"learning_rate": 9.968561049466214e-06, |
|
"loss": 0.6735, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.4128, |
|
"grad_norm": 0.6932426691055298, |
|
"learning_rate": 9.961967251474823e-06, |
|
"loss": 0.6874, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.4224, |
|
"grad_norm": 0.5851962566375732, |
|
"learning_rate": 9.954748808839675e-06, |
|
"loss": 0.6558, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.432, |
|
"grad_norm": 0.6512772440910339, |
|
"learning_rate": 9.946906630265184e-06, |
|
"loss": 0.6932, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.4416, |
|
"grad_norm": 0.6203194856643677, |
|
"learning_rate": 9.938441702975689e-06, |
|
"loss": 0.6569, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.4512, |
|
"grad_norm": 0.5843777656555176, |
|
"learning_rate": 9.92935509259118e-06, |
|
"loss": 0.6916, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.4608, |
|
"grad_norm": 0.6552630662918091, |
|
"learning_rate": 9.91964794299315e-06, |
|
"loss": 0.6927, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.4704, |
|
"grad_norm": 0.5897120237350464, |
|
"learning_rate": 9.909321476180594e-06, |
|
"loss": 0.6418, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.6383408308029175, |
|
"learning_rate": 9.898376992116179e-06, |
|
"loss": 0.6697, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.4896, |
|
"grad_norm": 0.5719790458679199, |
|
"learning_rate": 9.886815868562596e-06, |
|
"loss": 0.6535, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.4992, |
|
"grad_norm": 0.6388934850692749, |
|
"learning_rate": 9.874639560909118e-06, |
|
"loss": 0.6738, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.5088, |
|
"grad_norm": 0.6141742467880249, |
|
"learning_rate": 9.861849601988384e-06, |
|
"loss": 0.6844, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.5184, |
|
"grad_norm": 0.710845410823822, |
|
"learning_rate": 9.848447601883436e-06, |
|
"loss": 0.6897, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.528, |
|
"grad_norm": 0.5908008813858032, |
|
"learning_rate": 9.834435247725032e-06, |
|
"loss": 0.6598, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.5376, |
|
"grad_norm": 0.7284044027328491, |
|
"learning_rate": 9.819814303479268e-06, |
|
"loss": 0.6383, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.5472, |
|
"grad_norm": 0.7161381840705872, |
|
"learning_rate": 9.804586609725499e-06, |
|
"loss": 0.6667, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.5568, |
|
"grad_norm": 0.6291981339454651, |
|
"learning_rate": 9.788754083424654e-06, |
|
"loss": 0.6504, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.5664, |
|
"grad_norm": 0.5240605473518372, |
|
"learning_rate": 9.772318717677905e-06, |
|
"loss": 0.6343, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.576, |
|
"grad_norm": 0.586787760257721, |
|
"learning_rate": 9.755282581475769e-06, |
|
"loss": 0.6418, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.5856, |
|
"grad_norm": 0.5460494160652161, |
|
"learning_rate": 9.737647819437645e-06, |
|
"loss": 0.644, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.5952, |
|
"grad_norm": 0.6215012073516846, |
|
"learning_rate": 9.719416651541839e-06, |
|
"loss": 0.6532, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.6048, |
|
"grad_norm": 0.5574711561203003, |
|
"learning_rate": 9.700591372846096e-06, |
|
"loss": 0.6262, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.6144, |
|
"grad_norm": 0.5710508823394775, |
|
"learning_rate": 9.681174353198687e-06, |
|
"loss": 0.6576, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.624, |
|
"grad_norm": 0.5259015560150146, |
|
"learning_rate": 9.661168036940071e-06, |
|
"loss": 0.6518, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.6336, |
|
"grad_norm": 0.6580711007118225, |
|
"learning_rate": 9.640574942595195e-06, |
|
"loss": 0.6315, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.6432, |
|
"grad_norm": 0.5900007486343384, |
|
"learning_rate": 9.619397662556434e-06, |
|
"loss": 0.6615, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.6528, |
|
"grad_norm": 0.636417031288147, |
|
"learning_rate": 9.597638862757255e-06, |
|
"loss": 0.6511, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.6624, |
|
"grad_norm": 0.6361602544784546, |
|
"learning_rate": 9.5753012823366e-06, |
|
"loss": 0.6523, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.672, |
|
"grad_norm": 0.6292795538902283, |
|
"learning_rate": 9.552387733294081e-06, |
|
"loss": 0.6317, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.6816, |
|
"grad_norm": 0.6038651466369629, |
|
"learning_rate": 9.528901100135971e-06, |
|
"loss": 0.6424, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.6912, |
|
"grad_norm": 0.5914206504821777, |
|
"learning_rate": 9.504844339512096e-06, |
|
"loss": 0.6868, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.7008, |
|
"grad_norm": 0.6082513928413391, |
|
"learning_rate": 9.480220479843627e-06, |
|
"loss": 0.6638, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.7104, |
|
"grad_norm": 0.5032195448875427, |
|
"learning_rate": 9.45503262094184e-06, |
|
"loss": 0.6381, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.5890430212020874, |
|
"learning_rate": 9.4292839336179e-06, |
|
"loss": 0.627, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.7296, |
|
"grad_norm": 0.6046479344367981, |
|
"learning_rate": 9.40297765928369e-06, |
|
"loss": 0.6597, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.7392, |
|
"grad_norm": 0.557981014251709, |
|
"learning_rate": 9.376117109543769e-06, |
|
"loss": 0.6607, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.7488, |
|
"grad_norm": 0.6756407022476196, |
|
"learning_rate": 9.348705665778479e-06, |
|
"loss": 0.6459, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.7584, |
|
"grad_norm": 0.6524885296821594, |
|
"learning_rate": 9.320746778718274e-06, |
|
"loss": 0.6418, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.768, |
|
"grad_norm": 0.5784417986869812, |
|
"learning_rate": 9.292243968009332e-06, |
|
"loss": 0.6269, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.7776, |
|
"grad_norm": 0.6418123245239258, |
|
"learning_rate": 9.263200821770462e-06, |
|
"loss": 0.63, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.7872, |
|
"grad_norm": 0.6497018337249756, |
|
"learning_rate": 9.233620996141421e-06, |
|
"loss": 0.6605, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.7968, |
|
"grad_norm": 0.6296787261962891, |
|
"learning_rate": 9.203508214822652e-06, |
|
"loss": 0.6678, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.8064, |
|
"grad_norm": 0.6279412508010864, |
|
"learning_rate": 9.172866268606514e-06, |
|
"loss": 0.663, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.816, |
|
"grad_norm": 0.5324777960777283, |
|
"learning_rate": 9.141699014900084e-06, |
|
"loss": 0.6427, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.8256, |
|
"grad_norm": 0.6004756093025208, |
|
"learning_rate": 9.110010377239552e-06, |
|
"loss": 0.6407, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.8352, |
|
"grad_norm": 0.6031383872032166, |
|
"learning_rate": 9.077804344796302e-06, |
|
"loss": 0.634, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.8448, |
|
"grad_norm": 0.5144718885421753, |
|
"learning_rate": 9.045084971874738e-06, |
|
"loss": 0.6503, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.8544, |
|
"grad_norm": 0.6444131731987, |
|
"learning_rate": 9.011856377401891e-06, |
|
"loss": 0.6418, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.864, |
|
"grad_norm": 0.6462003588676453, |
|
"learning_rate": 8.978122744408905e-06, |
|
"loss": 0.6438, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.8736, |
|
"grad_norm": 0.5521820783615112, |
|
"learning_rate": 8.943888319504456e-06, |
|
"loss": 0.6566, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.8832, |
|
"grad_norm": 0.5820996761322021, |
|
"learning_rate": 8.90915741234015e-06, |
|
"loss": 0.6361, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.8928, |
|
"grad_norm": 0.7227920889854431, |
|
"learning_rate": 8.873934395068006e-06, |
|
"loss": 0.6652, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.9024, |
|
"grad_norm": 0.5779785513877869, |
|
"learning_rate": 8.838223701790057e-06, |
|
"loss": 0.6216, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.912, |
|
"grad_norm": 0.5625780820846558, |
|
"learning_rate": 8.802029828000157e-06, |
|
"loss": 0.6123, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.9216, |
|
"grad_norm": 0.6641936302185059, |
|
"learning_rate": 8.765357330018056e-06, |
|
"loss": 0.6392, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.9312, |
|
"grad_norm": 0.514313280582428, |
|
"learning_rate": 8.728210824415829e-06, |
|
"loss": 0.6051, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.9408, |
|
"grad_norm": 0.6550350189208984, |
|
"learning_rate": 8.690594987436705e-06, |
|
"loss": 0.6084, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.9504, |
|
"grad_norm": 0.545590341091156, |
|
"learning_rate": 8.652514554406388e-06, |
|
"loss": 0.6335, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.5167790055274963, |
|
"learning_rate": 8.613974319136959e-06, |
|
"loss": 0.6244, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.9696, |
|
"grad_norm": 0.678073525428772, |
|
"learning_rate": 8.574979133323378e-06, |
|
"loss": 0.6363, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.9792, |
|
"grad_norm": 0.538718044757843, |
|
"learning_rate": 8.535533905932739e-06, |
|
"loss": 0.6407, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.9888, |
|
"grad_norm": 0.6520209908485413, |
|
"learning_rate": 8.495643602586287e-06, |
|
"loss": 0.6485, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.9984, |
|
"grad_norm": 0.6909246444702148, |
|
"learning_rate": 8.455313244934324e-06, |
|
"loss": 0.6727, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 1.008, |
|
"grad_norm": 1.312455177307129, |
|
"learning_rate": 8.414547910024035e-06, |
|
"loss": 1.0788, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 1.0176, |
|
"grad_norm": 0.706491231918335, |
|
"learning_rate": 8.373352729660373e-06, |
|
"loss": 0.5632, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 1.0272, |
|
"grad_norm": 0.6403841376304626, |
|
"learning_rate": 8.331732889760021e-06, |
|
"loss": 0.5904, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 1.0368, |
|
"grad_norm": 0.5960009098052979, |
|
"learning_rate": 8.289693629698564e-06, |
|
"loss": 0.5909, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 1.0464, |
|
"grad_norm": 0.7191261053085327, |
|
"learning_rate": 8.247240241650918e-06, |
|
"loss": 0.6101, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 1.056, |
|
"grad_norm": 0.5526676177978516, |
|
"learning_rate": 8.204378069925121e-06, |
|
"loss": 0.6091, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 1.0656, |
|
"grad_norm": 0.6340149641036987, |
|
"learning_rate": 8.16111251028955e-06, |
|
"loss": 0.5899, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 1.0752, |
|
"grad_norm": 0.5784077644348145, |
|
"learning_rate": 8.117449009293668e-06, |
|
"loss": 0.5446, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 1.0848, |
|
"grad_norm": 0.6272123456001282, |
|
"learning_rate": 8.073393063582386e-06, |
|
"loss": 0.5883, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 1.0944, |
|
"grad_norm": 0.5847621560096741, |
|
"learning_rate": 8.0289502192041e-06, |
|
"loss": 0.5656, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 1.104, |
|
"grad_norm": 0.6778438091278076, |
|
"learning_rate": 7.984126070912519e-06, |
|
"loss": 0.6299, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 1.1136, |
|
"grad_norm": 0.6147951483726501, |
|
"learning_rate": 7.938926261462366e-06, |
|
"loss": 0.5624, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 1.1232, |
|
"grad_norm": 0.7127768397331238, |
|
"learning_rate": 7.89335648089903e-06, |
|
"loss": 0.6331, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 1.1328, |
|
"grad_norm": 0.5153930187225342, |
|
"learning_rate": 7.84742246584226e-06, |
|
"loss": 0.5773, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 1.1424, |
|
"grad_norm": 0.5773877501487732, |
|
"learning_rate": 7.801129998764014e-06, |
|
"loss": 0.5535, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 1.152, |
|
"grad_norm": 0.6432895660400391, |
|
"learning_rate": 7.754484907260513e-06, |
|
"loss": 0.6113, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 1.1616, |
|
"grad_norm": 0.546201765537262, |
|
"learning_rate": 7.70749306331863e-06, |
|
"loss": 0.5759, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 1.1712, |
|
"grad_norm": 0.561962366104126, |
|
"learning_rate": 7.660160382576683e-06, |
|
"loss": 0.6, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 1.1808, |
|
"grad_norm": 0.5166497230529785, |
|
"learning_rate": 7.612492823579744e-06, |
|
"loss": 0.6019, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 1.1904, |
|
"grad_norm": 0.6238263845443726, |
|
"learning_rate": 7.564496387029532e-06, |
|
"loss": 0.6021, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 0.5210447311401367, |
|
"learning_rate": 7.516177115029002e-06, |
|
"loss": 0.5664, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 1.2096, |
|
"grad_norm": 0.5317444801330566, |
|
"learning_rate": 7.467541090321735e-06, |
|
"loss": 0.5799, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 1.2192, |
|
"grad_norm": 0.5516515970230103, |
|
"learning_rate": 7.4185944355261996e-06, |
|
"loss": 0.5393, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 1.2288000000000001, |
|
"grad_norm": 0.5734798312187195, |
|
"learning_rate": 7.369343312364994e-06, |
|
"loss": 0.6331, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 1.2384, |
|
"grad_norm": 0.5610190629959106, |
|
"learning_rate": 7.319793920889171e-06, |
|
"loss": 0.5094, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 1.248, |
|
"grad_norm": 0.5572755336761475, |
|
"learning_rate": 7.269952498697734e-06, |
|
"loss": 0.5987, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.2576, |
|
"grad_norm": 0.5415238738059998, |
|
"learning_rate": 7.219825320152411e-06, |
|
"loss": 0.6032, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 1.2671999999999999, |
|
"grad_norm": 0.48308759927749634, |
|
"learning_rate": 7.169418695587791e-06, |
|
"loss": 0.488, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 1.2768, |
|
"grad_norm": 0.5875827074050903, |
|
"learning_rate": 7.118738970516944e-06, |
|
"loss": 0.6191, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 1.2864, |
|
"grad_norm": 0.4882888197898865, |
|
"learning_rate": 7.067792524832604e-06, |
|
"loss": 0.5268, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 1.296, |
|
"grad_norm": 0.6158053874969482, |
|
"learning_rate": 7.016585772004026e-06, |
|
"loss": 0.6349, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 1.3056, |
|
"grad_norm": 0.47751063108444214, |
|
"learning_rate": 6.965125158269619e-06, |
|
"loss": 0.6108, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 1.3152, |
|
"grad_norm": 0.5912806391716003, |
|
"learning_rate": 6.913417161825449e-06, |
|
"loss": 0.579, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 1.3248, |
|
"grad_norm": 0.5066422820091248, |
|
"learning_rate": 6.8614682920097265e-06, |
|
"loss": 0.5309, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 1.3344, |
|
"grad_norm": 0.6349862813949585, |
|
"learning_rate": 6.809285088483361e-06, |
|
"loss": 0.6401, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 1.3439999999999999, |
|
"grad_norm": 0.4846351444721222, |
|
"learning_rate": 6.7568741204067145e-06, |
|
"loss": 0.5554, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.3536000000000001, |
|
"grad_norm": 0.5736483335494995, |
|
"learning_rate": 6.704241985612625e-06, |
|
"loss": 0.5849, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 1.3632, |
|
"grad_norm": 0.4874175786972046, |
|
"learning_rate": 6.651395309775837e-06, |
|
"loss": 0.5626, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 1.3728, |
|
"grad_norm": 0.5509251356124878, |
|
"learning_rate": 6.598340745578908e-06, |
|
"loss": 0.6218, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 1.3824, |
|
"grad_norm": 0.570919394493103, |
|
"learning_rate": 6.545084971874738e-06, |
|
"loss": 0.5982, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 1.392, |
|
"grad_norm": 0.5047493577003479, |
|
"learning_rate": 6.491634692845781e-06, |
|
"loss": 0.5755, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 1.4016, |
|
"grad_norm": 0.4495947062969208, |
|
"learning_rate": 6.437996637160086e-06, |
|
"loss": 0.5432, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 1.4112, |
|
"grad_norm": 0.5126574039459229, |
|
"learning_rate": 6.384177557124247e-06, |
|
"loss": 0.6209, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 1.4208, |
|
"grad_norm": 0.504632294178009, |
|
"learning_rate": 6.330184227833376e-06, |
|
"loss": 0.553, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.4304000000000001, |
|
"grad_norm": 0.5119583606719971, |
|
"learning_rate": 6.276023446318214e-06, |
|
"loss": 0.5674, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 0.5273292660713196, |
|
"learning_rate": 6.2217020306894705e-06, |
|
"loss": 0.5947, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.4496, |
|
"grad_norm": 0.42243409156799316, |
|
"learning_rate": 6.1672268192795285e-06, |
|
"loss": 0.5382, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 1.4592, |
|
"grad_norm": 0.43099960684776306, |
|
"learning_rate": 6.112604669781572e-06, |
|
"loss": 0.5702, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.4687999999999999, |
|
"grad_norm": 0.5123092532157898, |
|
"learning_rate": 6.057842458386315e-06, |
|
"loss": 0.6564, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 1.4784, |
|
"grad_norm": 0.470164030790329, |
|
"learning_rate": 6.002947078916365e-06, |
|
"loss": 0.5484, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 1.488, |
|
"grad_norm": 0.465804785490036, |
|
"learning_rate": 5.947925441958393e-06, |
|
"loss": 0.5918, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 1.4976, |
|
"grad_norm": 0.45440298318862915, |
|
"learning_rate": 5.892784473993184e-06, |
|
"loss": 0.5526, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 1.5072, |
|
"grad_norm": 0.522992730140686, |
|
"learning_rate": 5.837531116523683e-06, |
|
"loss": 0.6385, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 1.5168, |
|
"grad_norm": 0.4795243740081787, |
|
"learning_rate": 5.782172325201155e-06, |
|
"loss": 0.5744, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 1.5264, |
|
"grad_norm": 0.4822704792022705, |
|
"learning_rate": 5.726715068949564e-06, |
|
"loss": 0.5168, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 1.536, |
|
"grad_norm": 0.5573145151138306, |
|
"learning_rate": 5.671166329088278e-06, |
|
"loss": 0.6515, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 1.5455999999999999, |
|
"grad_norm": 0.5187973976135254, |
|
"learning_rate": 5.615533098453215e-06, |
|
"loss": 0.5704, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 1.5552000000000001, |
|
"grad_norm": 0.47124606370925903, |
|
"learning_rate": 5.559822380516539e-06, |
|
"loss": 0.5753, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 1.5648, |
|
"grad_norm": 0.5377249717712402, |
|
"learning_rate": 5.504041188505022e-06, |
|
"loss": 0.5911, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 1.5744, |
|
"grad_norm": 0.4670109152793884, |
|
"learning_rate": 5.448196544517168e-06, |
|
"loss": 0.5771, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 1.584, |
|
"grad_norm": 0.5638590455055237, |
|
"learning_rate": 5.392295478639226e-06, |
|
"loss": 0.5714, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 1.5936, |
|
"grad_norm": 0.48816683888435364, |
|
"learning_rate": 5.336345028060199e-06, |
|
"loss": 0.5873, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 1.6032, |
|
"grad_norm": 0.5010278820991516, |
|
"learning_rate": 5.2803522361859596e-06, |
|
"loss": 0.5667, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 1.6128, |
|
"grad_norm": 0.5451114177703857, |
|
"learning_rate": 5.224324151752575e-06, |
|
"loss": 0.5715, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 1.6223999999999998, |
|
"grad_norm": 0.45967310667037964, |
|
"learning_rate": 5.168267827938971e-06, |
|
"loss": 0.5353, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 1.6320000000000001, |
|
"grad_norm": 0.5528745651245117, |
|
"learning_rate": 5.112190321479026e-06, |
|
"loss": 0.582, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 1.6416, |
|
"grad_norm": 0.5903663039207458, |
|
"learning_rate": 5.05609869177323e-06, |
|
"loss": 0.6188, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 1.6512, |
|
"grad_norm": 0.5361656546592712, |
|
"learning_rate": 5e-06, |
|
"loss": 0.4733, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 1.6608, |
|
"grad_norm": 0.6212491393089294, |
|
"learning_rate": 4.943901308226771e-06, |
|
"loss": 0.6487, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 1.6703999999999999, |
|
"grad_norm": 0.48965513706207275, |
|
"learning_rate": 4.887809678520976e-06, |
|
"loss": 0.5334, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 1.6800000000000002, |
|
"grad_norm": 0.5252687931060791, |
|
"learning_rate": 4.831732172061032e-06, |
|
"loss": 0.5959, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 1.6896, |
|
"grad_norm": 0.6434024572372437, |
|
"learning_rate": 4.775675848247427e-06, |
|
"loss": 0.6187, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 1.6992, |
|
"grad_norm": 0.5325840711593628, |
|
"learning_rate": 4.719647763814041e-06, |
|
"loss": 0.5754, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 1.7088, |
|
"grad_norm": 0.4677431881427765, |
|
"learning_rate": 4.663654971939802e-06, |
|
"loss": 0.6058, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 1.7184, |
|
"grad_norm": 0.4714028537273407, |
|
"learning_rate": 4.6077045213607765e-06, |
|
"loss": 0.5671, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 1.728, |
|
"grad_norm": 0.5045012831687927, |
|
"learning_rate": 4.551803455482833e-06, |
|
"loss": 0.5921, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 1.7376, |
|
"grad_norm": 0.5761282444000244, |
|
"learning_rate": 4.4959588114949785e-06, |
|
"loss": 0.6229, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 1.7471999999999999, |
|
"grad_norm": 0.5460184812545776, |
|
"learning_rate": 4.4401776194834615e-06, |
|
"loss": 0.5371, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 1.7568000000000001, |
|
"grad_norm": 0.4542312026023865, |
|
"learning_rate": 4.384466901546786e-06, |
|
"loss": 0.6257, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 1.7664, |
|
"grad_norm": 0.5277249813079834, |
|
"learning_rate": 4.3288336709117246e-06, |
|
"loss": 0.558, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 1.776, |
|
"grad_norm": 0.44270434975624084, |
|
"learning_rate": 4.273284931050438e-06, |
|
"loss": 0.5644, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 1.7856, |
|
"grad_norm": 0.4488348066806793, |
|
"learning_rate": 4.217827674798845e-06, |
|
"loss": 0.583, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 1.7952, |
|
"grad_norm": 0.4872271716594696, |
|
"learning_rate": 4.162468883476319e-06, |
|
"loss": 0.6316, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 1.8048, |
|
"grad_norm": 0.4470103085041046, |
|
"learning_rate": 4.107215526006818e-06, |
|
"loss": 0.5279, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 1.8144, |
|
"grad_norm": 0.477935791015625, |
|
"learning_rate": 4.052074558041608e-06, |
|
"loss": 0.5368, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 1.8239999999999998, |
|
"grad_norm": 0.46086037158966064, |
|
"learning_rate": 3.997052921083637e-06, |
|
"loss": 0.6028, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 1.8336000000000001, |
|
"grad_norm": 0.4894026219844818, |
|
"learning_rate": 3.9421575416136866e-06, |
|
"loss": 0.5955, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 1.8432, |
|
"grad_norm": 0.4144808351993561, |
|
"learning_rate": 3.887395330218429e-06, |
|
"loss": 0.5063, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 1.8528, |
|
"grad_norm": 0.4998006224632263, |
|
"learning_rate": 3.832773180720475e-06, |
|
"loss": 0.6493, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 1.8624, |
|
"grad_norm": 0.46334385871887207, |
|
"learning_rate": 3.778297969310529e-06, |
|
"loss": 0.5779, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 1.8719999999999999, |
|
"grad_norm": 0.44945594668388367, |
|
"learning_rate": 3.723976553681787e-06, |
|
"loss": 0.5444, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 1.8816000000000002, |
|
"grad_norm": 0.48254647850990295, |
|
"learning_rate": 3.669815772166625e-06, |
|
"loss": 0.5834, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 1.8912, |
|
"grad_norm": 0.4141770303249359, |
|
"learning_rate": 3.6158224428757538e-06, |
|
"loss": 0.5167, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 1.9008, |
|
"grad_norm": 0.4774608612060547, |
|
"learning_rate": 3.562003362839914e-06, |
|
"loss": 0.576, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 1.9104, |
|
"grad_norm": 0.46828344464302063, |
|
"learning_rate": 3.50836530715422e-06, |
|
"loss": 0.5696, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 0.5107264518737793, |
|
"learning_rate": 3.4549150281252635e-06, |
|
"loss": 0.6113, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 1.9296, |
|
"grad_norm": 0.42679721117019653, |
|
"learning_rate": 3.4016592544210937e-06, |
|
"loss": 0.5417, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 1.9392, |
|
"grad_norm": 0.47424426674842834, |
|
"learning_rate": 3.3486046902241663e-06, |
|
"loss": 0.5776, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 1.9487999999999999, |
|
"grad_norm": 0.44007402658462524, |
|
"learning_rate": 3.295758014387375e-06, |
|
"loss": 0.5334, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 1.9584000000000001, |
|
"grad_norm": 0.519551157951355, |
|
"learning_rate": 3.2431258795932863e-06, |
|
"loss": 0.5946, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 1.968, |
|
"grad_norm": 0.4806658923625946, |
|
"learning_rate": 3.1907149115166403e-06, |
|
"loss": 0.5666, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 1.9776, |
|
"grad_norm": 0.47380775213241577, |
|
"learning_rate": 3.1385317079902743e-06, |
|
"loss": 0.597, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 1.9872, |
|
"grad_norm": 0.48056966066360474, |
|
"learning_rate": 3.0865828381745515e-06, |
|
"loss": 0.538, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 1.9968, |
|
"grad_norm": 0.5239747762680054, |
|
"learning_rate": 3.0348748417303826e-06, |
|
"loss": 0.5785, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 2.0064, |
|
"grad_norm": 1.2484456300735474, |
|
"learning_rate": 2.9834142279959754e-06, |
|
"loss": 1.0245, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 2.016, |
|
"grad_norm": 0.4434763789176941, |
|
"learning_rate": 2.932207475167398e-06, |
|
"loss": 0.4894, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 2.0256, |
|
"grad_norm": 0.5152484774589539, |
|
"learning_rate": 2.8812610294830568e-06, |
|
"loss": 0.5657, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 2.0352, |
|
"grad_norm": 0.46128612756729126, |
|
"learning_rate": 2.83058130441221e-06, |
|
"loss": 0.4564, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 2.0448, |
|
"grad_norm": 0.4728868901729584, |
|
"learning_rate": 2.7801746798475905e-06, |
|
"loss": 0.5775, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 2.0544, |
|
"grad_norm": 0.4691709280014038, |
|
"learning_rate": 2.7300475013022666e-06, |
|
"loss": 0.5677, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 2.064, |
|
"grad_norm": 0.4791906177997589, |
|
"learning_rate": 2.6802060791108304e-06, |
|
"loss": 0.5207, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 2.0736, |
|
"grad_norm": 0.450295627117157, |
|
"learning_rate": 2.6306566876350072e-06, |
|
"loss": 0.4673, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 2.0832, |
|
"grad_norm": 0.48020562529563904, |
|
"learning_rate": 2.5814055644738013e-06, |
|
"loss": 0.5457, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 2.0928, |
|
"grad_norm": 0.3935261368751526, |
|
"learning_rate": 2.532458909678266e-06, |
|
"loss": 0.5164, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 2.1024, |
|
"grad_norm": 0.464346706867218, |
|
"learning_rate": 2.483822884971e-06, |
|
"loss": 0.557, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 2.112, |
|
"grad_norm": 0.5267810821533203, |
|
"learning_rate": 2.43550361297047e-06, |
|
"loss": 0.4987, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 2.1216, |
|
"grad_norm": 0.49507227540016174, |
|
"learning_rate": 2.387507176420256e-06, |
|
"loss": 0.5607, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 2.1312, |
|
"grad_norm": 0.45029518008232117, |
|
"learning_rate": 2.339839617423318e-06, |
|
"loss": 0.5208, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 2.1408, |
|
"grad_norm": 0.44882386922836304, |
|
"learning_rate": 2.2925069366813718e-06, |
|
"loss": 0.5426, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 2.1504, |
|
"grad_norm": 0.47287508845329285, |
|
"learning_rate": 2.245515092739488e-06, |
|
"loss": 0.5239, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 0.4874938726425171, |
|
"learning_rate": 2.1988700012359865e-06, |
|
"loss": 0.5492, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 2.1696, |
|
"grad_norm": 0.4065110683441162, |
|
"learning_rate": 2.1525775341577404e-06, |
|
"loss": 0.4777, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 2.1792, |
|
"grad_norm": 0.4227883219718933, |
|
"learning_rate": 2.1066435191009717e-06, |
|
"loss": 0.6065, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 2.1888, |
|
"grad_norm": 0.39913421869277954, |
|
"learning_rate": 2.061073738537635e-06, |
|
"loss": 0.4807, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 2.1984, |
|
"grad_norm": 0.4827454686164856, |
|
"learning_rate": 2.0158739290874822e-06, |
|
"loss": 0.5807, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 2.208, |
|
"grad_norm": 0.43721911311149597, |
|
"learning_rate": 1.971049780795901e-06, |
|
"loss": 0.5519, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 2.2176, |
|
"grad_norm": 0.3625260293483734, |
|
"learning_rate": 1.9266069364176144e-06, |
|
"loss": 0.5069, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 2.2272, |
|
"grad_norm": 0.3953098952770233, |
|
"learning_rate": 1.8825509907063328e-06, |
|
"loss": 0.5327, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 2.2368, |
|
"grad_norm": 0.38376370072364807, |
|
"learning_rate": 1.838887489710452e-06, |
|
"loss": 0.5435, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 2.2464, |
|
"grad_norm": 0.3926437199115753, |
|
"learning_rate": 1.7956219300748796e-06, |
|
"loss": 0.4793, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 2.2560000000000002, |
|
"grad_norm": 0.44054508209228516, |
|
"learning_rate": 1.7527597583490825e-06, |
|
"loss": 0.5631, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 2.2656, |
|
"grad_norm": 0.41770970821380615, |
|
"learning_rate": 1.7103063703014372e-06, |
|
"loss": 0.563, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 2.2752, |
|
"grad_norm": 0.4216345250606537, |
|
"learning_rate": 1.6682671102399806e-06, |
|
"loss": 0.5599, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 2.2848, |
|
"grad_norm": 0.4026756286621094, |
|
"learning_rate": 1.6266472703396286e-06, |
|
"loss": 0.5192, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 2.2944, |
|
"grad_norm": 0.382327139377594, |
|
"learning_rate": 1.5854520899759656e-06, |
|
"loss": 0.5279, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 2.304, |
|
"grad_norm": 0.39819908142089844, |
|
"learning_rate": 1.544686755065677e-06, |
|
"loss": 0.4906, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 2.3136, |
|
"grad_norm": 0.42366543412208557, |
|
"learning_rate": 1.5043563974137132e-06, |
|
"loss": 0.5485, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 2.3232, |
|
"grad_norm": 0.38778194785118103, |
|
"learning_rate": 1.4644660940672628e-06, |
|
"loss": 0.5269, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 2.3327999999999998, |
|
"grad_norm": 0.38411736488342285, |
|
"learning_rate": 1.4250208666766235e-06, |
|
"loss": 0.5483, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 2.3424, |
|
"grad_norm": 0.40252214670181274, |
|
"learning_rate": 1.3860256808630429e-06, |
|
"loss": 0.5018, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 2.352, |
|
"grad_norm": 0.4110822379589081, |
|
"learning_rate": 1.3474854455936126e-06, |
|
"loss": 0.5486, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 2.3616, |
|
"grad_norm": 0.4108063876628876, |
|
"learning_rate": 1.3094050125632973e-06, |
|
"loss": 0.5573, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 2.3712, |
|
"grad_norm": 0.3773021101951599, |
|
"learning_rate": 1.2717891755841722e-06, |
|
"loss": 0.506, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 2.3808, |
|
"grad_norm": 0.41075634956359863, |
|
"learning_rate": 1.234642669981946e-06, |
|
"loss": 0.5406, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 2.3904, |
|
"grad_norm": 0.3963279128074646, |
|
"learning_rate": 1.1979701719998454e-06, |
|
"loss": 0.545, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 0.3936994671821594, |
|
"learning_rate": 1.1617762982099446e-06, |
|
"loss": 0.5294, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 2.4096, |
|
"grad_norm": 0.37653613090515137, |
|
"learning_rate": 1.1260656049319957e-06, |
|
"loss": 0.5246, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 2.4192, |
|
"grad_norm": 0.4054081439971924, |
|
"learning_rate": 1.0908425876598512e-06, |
|
"loss": 0.5649, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 2.4288, |
|
"grad_norm": 0.37866562604904175, |
|
"learning_rate": 1.0561116804955451e-06, |
|
"loss": 0.537, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 2.4384, |
|
"grad_norm": 0.3885415494441986, |
|
"learning_rate": 1.0218772555910955e-06, |
|
"loss": 0.5036, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 2.448, |
|
"grad_norm": 0.418641597032547, |
|
"learning_rate": 9.881436225981107e-07, |
|
"loss": 0.5812, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 2.4576000000000002, |
|
"grad_norm": 0.36240801215171814, |
|
"learning_rate": 9.549150281252633e-07, |
|
"loss": 0.4417, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 2.4672, |
|
"grad_norm": 0.4024696946144104, |
|
"learning_rate": 9.221956552036992e-07, |
|
"loss": 0.5798, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 2.4768, |
|
"grad_norm": 0.38764163851737976, |
|
"learning_rate": 8.899896227604509e-07, |
|
"loss": 0.502, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 2.4864, |
|
"grad_norm": 0.4298345744609833, |
|
"learning_rate": 8.58300985099918e-07, |
|
"loss": 0.5429, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 2.496, |
|
"grad_norm": 0.39933955669403076, |
|
"learning_rate": 8.271337313934869e-07, |
|
"loss": 0.5403, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 2.5056000000000003, |
|
"grad_norm": 0.3838823437690735, |
|
"learning_rate": 7.964917851773496e-07, |
|
"loss": 0.5212, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 2.5152, |
|
"grad_norm": 0.41426998376846313, |
|
"learning_rate": 7.663790038585794e-07, |
|
"loss": 0.5227, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 2.5248, |
|
"grad_norm": 0.40095555782318115, |
|
"learning_rate": 7.367991782295392e-07, |
|
"loss": 0.4788, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 2.5343999999999998, |
|
"grad_norm": 0.41771426796913147, |
|
"learning_rate": 7.077560319906696e-07, |
|
"loss": 0.5376, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 2.544, |
|
"grad_norm": 0.376322865486145, |
|
"learning_rate": 6.792532212817271e-07, |
|
"loss": 0.4766, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 2.5536, |
|
"grad_norm": 0.4470244348049164, |
|
"learning_rate": 6.512943342215234e-07, |
|
"loss": 0.57, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 2.5632, |
|
"grad_norm": 0.36726924777030945, |
|
"learning_rate": 6.238828904562316e-07, |
|
"loss": 0.4631, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 2.5728, |
|
"grad_norm": 0.40520891547203064, |
|
"learning_rate": 5.9702234071631e-07, |
|
"loss": 0.549, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 2.5824, |
|
"grad_norm": 0.3793184459209442, |
|
"learning_rate": 5.707160663821009e-07, |
|
"loss": 0.5225, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 2.592, |
|
"grad_norm": 0.4228286147117615, |
|
"learning_rate": 5.449673790581611e-07, |
|
"loss": 0.5731, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 2.6016, |
|
"grad_norm": 0.32514867186546326, |
|
"learning_rate": 5.197795201563744e-07, |
|
"loss": 0.4647, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 2.6112, |
|
"grad_norm": 0.4135151505470276, |
|
"learning_rate": 4.951556604879049e-07, |
|
"loss": 0.6043, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 2.6208, |
|
"grad_norm": 0.37106066942214966, |
|
"learning_rate": 4.710988998640298e-07, |
|
"loss": 0.5218, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 2.6304, |
|
"grad_norm": 0.3782541751861572, |
|
"learning_rate": 4.4761226670592074e-07, |
|
"loss": 0.5365, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 0.40136420726776123, |
|
"learning_rate": 4.2469871766340096e-07, |
|
"loss": 0.5295, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 2.6496, |
|
"grad_norm": 0.3829237222671509, |
|
"learning_rate": 4.0236113724274716e-07, |
|
"loss": 0.544, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 2.6592000000000002, |
|
"grad_norm": 0.4050566554069519, |
|
"learning_rate": 3.8060233744356634e-07, |
|
"loss": 0.5751, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 2.6688, |
|
"grad_norm": 0.3831566274166107, |
|
"learning_rate": 3.5942505740480583e-07, |
|
"loss": 0.483, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 2.6784, |
|
"grad_norm": 0.40931302309036255, |
|
"learning_rate": 3.3883196305992906e-07, |
|
"loss": 0.5429, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 2.6879999999999997, |
|
"grad_norm": 0.4193519651889801, |
|
"learning_rate": 3.18825646801314e-07, |
|
"loss": 0.5176, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 2.6976, |
|
"grad_norm": 0.38546380400657654, |
|
"learning_rate": 2.9940862715390483e-07, |
|
"loss": 0.5182, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 2.7072000000000003, |
|
"grad_norm": 0.3639739155769348, |
|
"learning_rate": 2.8058334845816214e-07, |
|
"loss": 0.4759, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 2.7168, |
|
"grad_norm": 0.4042200744152069, |
|
"learning_rate": 2.6235218056235633e-07, |
|
"loss": 0.5925, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 2.7264, |
|
"grad_norm": 0.4064270555973053, |
|
"learning_rate": 2.447174185242324e-07, |
|
"loss": 0.5799, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 2.7359999999999998, |
|
"grad_norm": 0.37163835763931274, |
|
"learning_rate": 2.276812823220964e-07, |
|
"loss": 0.4825, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 2.7456, |
|
"grad_norm": 0.39752858877182007, |
|
"learning_rate": 2.1124591657534776e-07, |
|
"loss": 0.5338, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 2.7552, |
|
"grad_norm": 0.36681297421455383, |
|
"learning_rate": 1.9541339027450256e-07, |
|
"loss": 0.5208, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 2.7648, |
|
"grad_norm": 0.38168057799339294, |
|
"learning_rate": 1.801856965207338e-07, |
|
"loss": 0.5941, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 2.7744, |
|
"grad_norm": 0.3118930757045746, |
|
"learning_rate": 1.6556475227496816e-07, |
|
"loss": 0.4631, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 2.784, |
|
"grad_norm": 0.3986196219921112, |
|
"learning_rate": 1.5155239811656562e-07, |
|
"loss": 0.5773, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 2.7936, |
|
"grad_norm": 0.3834291696548462, |
|
"learning_rate": 1.3815039801161723e-07, |
|
"loss": 0.553, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 2.8032, |
|
"grad_norm": 0.38300490379333496, |
|
"learning_rate": 1.253604390908819e-07, |
|
"loss": 0.5055, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 2.8128, |
|
"grad_norm": 0.40329509973526, |
|
"learning_rate": 1.1318413143740436e-07, |
|
"loss": 0.5562, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 2.8224, |
|
"grad_norm": 0.37382957339286804, |
|
"learning_rate": 1.0162300788382263e-07, |
|
"loss": 0.5532, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 2.832, |
|
"grad_norm": 0.395346462726593, |
|
"learning_rate": 9.0678523819408e-08, |
|
"loss": 0.4918, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 2.8416, |
|
"grad_norm": 0.3847428858280182, |
|
"learning_rate": 8.035205700685167e-08, |
|
"loss": 0.5126, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 2.8512, |
|
"grad_norm": 0.39311036467552185, |
|
"learning_rate": 7.064490740882057e-08, |
|
"loss": 0.5539, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 2.8608000000000002, |
|
"grad_norm": 0.3707370162010193, |
|
"learning_rate": 6.15582970243117e-08, |
|
"loss": 0.5242, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 2.8704, |
|
"grad_norm": 0.39528071880340576, |
|
"learning_rate": 5.3093369734816824e-08, |
|
"loss": 0.5205, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"grad_norm": 0.37834280729293823, |
|
"learning_rate": 4.52511911603265e-08, |
|
"loss": 0.5625, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 2.8895999999999997, |
|
"grad_norm": 0.3650013208389282, |
|
"learning_rate": 3.8032748525179684e-08, |
|
"loss": 0.4993, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 2.8992, |
|
"grad_norm": 0.40211090445518494, |
|
"learning_rate": 3.143895053378698e-08, |
|
"loss": 0.5518, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 2.9088000000000003, |
|
"grad_norm": 0.4369528293609619, |
|
"learning_rate": 2.547062725623828e-08, |
|
"loss": 0.6037, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 2.9184, |
|
"grad_norm": 0.34852176904678345, |
|
"learning_rate": 2.012853002380466e-08, |
|
"loss": 0.4652, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 2.928, |
|
"grad_norm": 0.3797735273838043, |
|
"learning_rate": 1.541333133436018e-08, |
|
"loss": 0.5442, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 2.9375999999999998, |
|
"grad_norm": 0.3867435157299042, |
|
"learning_rate": 1.132562476771959e-08, |
|
"loss": 0.5293, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 2.9472, |
|
"grad_norm": 0.3923242688179016, |
|
"learning_rate": 7.865924910916977e-09, |
|
"loss": 0.554, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 2.9568, |
|
"grad_norm": 0.3982171416282654, |
|
"learning_rate": 5.034667293427053e-09, |
|
"loss": 0.5274, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 2.9664, |
|
"grad_norm": 0.3748815953731537, |
|
"learning_rate": 2.8322083323334417e-09, |
|
"loss": 0.5124, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 2.976, |
|
"grad_norm": 0.37689897418022156, |
|
"learning_rate": 1.2588252874673469e-09, |
|
"loss": 0.5876, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 2.9856, |
|
"grad_norm": 0.3875444531440735, |
|
"learning_rate": 3.147162264971471e-10, |
|
"loss": 0.5288, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 2.9952, |
|
"grad_norm": 0.37963417172431946, |
|
"learning_rate": 0.0, |
|
"loss": 0.5648, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 2.9952, |
|
"step": 312, |
|
"total_flos": 312484367761408.0, |
|
"train_loss": 0.6120152068443787, |
|
"train_runtime": 29530.8856, |
|
"train_samples_per_second": 1.016, |
|
"train_steps_per_second": 0.011 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 312, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 312484367761408.0, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |