|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 2.9904153354632586, |
|
"eval_steps": 500, |
|
"global_step": 312, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.009584664536741214, |
|
"grad_norm": 10.041484832763672, |
|
"learning_rate": 3.125e-07, |
|
"loss": 1.2067, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.019169329073482427, |
|
"grad_norm": 9.69267463684082, |
|
"learning_rate": 6.25e-07, |
|
"loss": 1.1824, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.02875399361022364, |
|
"grad_norm": 9.63055419921875, |
|
"learning_rate": 9.375000000000001e-07, |
|
"loss": 1.1705, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.038338658146964855, |
|
"grad_norm": 9.281330108642578, |
|
"learning_rate": 1.25e-06, |
|
"loss": 1.1789, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.04792332268370607, |
|
"grad_norm": 8.621428489685059, |
|
"learning_rate": 1.5625e-06, |
|
"loss": 1.1452, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.05750798722044728, |
|
"grad_norm": 8.266639709472656, |
|
"learning_rate": 1.8750000000000003e-06, |
|
"loss": 1.1485, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.0670926517571885, |
|
"grad_norm": 6.1149001121521, |
|
"learning_rate": 2.1875000000000002e-06, |
|
"loss": 1.0678, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.07667731629392971, |
|
"grad_norm": 5.883880615234375, |
|
"learning_rate": 2.5e-06, |
|
"loss": 1.0821, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.08626198083067092, |
|
"grad_norm": 3.8237390518188477, |
|
"learning_rate": 2.8125e-06, |
|
"loss": 1.0027, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.09584664536741214, |
|
"grad_norm": 3.546095132827759, |
|
"learning_rate": 3.125e-06, |
|
"loss": 1.0096, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.10543130990415335, |
|
"grad_norm": 3.4178311824798584, |
|
"learning_rate": 3.4375e-06, |
|
"loss": 0.9716, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.11501597444089456, |
|
"grad_norm": 3.771919012069702, |
|
"learning_rate": 3.7500000000000005e-06, |
|
"loss": 0.9502, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.12460063897763578, |
|
"grad_norm": 3.820408344268799, |
|
"learning_rate": 4.0625000000000005e-06, |
|
"loss": 0.9281, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.134185303514377, |
|
"grad_norm": 3.4381327629089355, |
|
"learning_rate": 4.3750000000000005e-06, |
|
"loss": 0.9087, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.14376996805111822, |
|
"grad_norm": 2.9971580505371094, |
|
"learning_rate": 4.6875000000000004e-06, |
|
"loss": 0.903, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.15335463258785942, |
|
"grad_norm": 2.2153100967407227, |
|
"learning_rate": 5e-06, |
|
"loss": 0.8486, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.16293929712460065, |
|
"grad_norm": 2.4878783226013184, |
|
"learning_rate": 5.3125e-06, |
|
"loss": 0.8321, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.17252396166134185, |
|
"grad_norm": 2.3063278198242188, |
|
"learning_rate": 5.625e-06, |
|
"loss": 0.8269, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.18210862619808307, |
|
"grad_norm": 1.750576376914978, |
|
"learning_rate": 5.9375e-06, |
|
"loss": 0.7997, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.19169329073482427, |
|
"grad_norm": 1.4879165887832642, |
|
"learning_rate": 6.25e-06, |
|
"loss": 0.8034, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.2012779552715655, |
|
"grad_norm": 1.3987113237380981, |
|
"learning_rate": 6.5625e-06, |
|
"loss": 0.7733, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.2108626198083067, |
|
"grad_norm": 1.6606000661849976, |
|
"learning_rate": 6.875e-06, |
|
"loss": 0.7515, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.22044728434504793, |
|
"grad_norm": 1.6238853931427002, |
|
"learning_rate": 7.1875e-06, |
|
"loss": 0.7396, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.23003194888178913, |
|
"grad_norm": 1.3396315574645996, |
|
"learning_rate": 7.500000000000001e-06, |
|
"loss": 0.7438, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.23961661341853036, |
|
"grad_norm": 1.21493661403656, |
|
"learning_rate": 7.8125e-06, |
|
"loss": 0.7533, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.24920127795527156, |
|
"grad_norm": 1.2480034828186035, |
|
"learning_rate": 8.125000000000001e-06, |
|
"loss": 0.7101, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.25878594249201275, |
|
"grad_norm": 1.399794340133667, |
|
"learning_rate": 8.4375e-06, |
|
"loss": 0.7147, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.268370607028754, |
|
"grad_norm": 1.250038981437683, |
|
"learning_rate": 8.750000000000001e-06, |
|
"loss": 0.7096, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.2779552715654952, |
|
"grad_norm": 1.1550706624984741, |
|
"learning_rate": 9.0625e-06, |
|
"loss": 0.6974, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.28753993610223644, |
|
"grad_norm": 1.114206075668335, |
|
"learning_rate": 9.375000000000001e-06, |
|
"loss": 0.6977, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.2971246006389776, |
|
"grad_norm": 0.9567247629165649, |
|
"learning_rate": 9.6875e-06, |
|
"loss": 0.6966, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.30670926517571884, |
|
"grad_norm": 1.1043249368667603, |
|
"learning_rate": 1e-05, |
|
"loss": 0.6856, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.31629392971246006, |
|
"grad_norm": 1.1120928525924683, |
|
"learning_rate": 9.999685283773504e-06, |
|
"loss": 0.6977, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.3258785942492013, |
|
"grad_norm": 0.8543025851249695, |
|
"learning_rate": 9.998741174712534e-06, |
|
"loss": 0.6773, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.3354632587859425, |
|
"grad_norm": 1.0587968826293945, |
|
"learning_rate": 9.997167791667668e-06, |
|
"loss": 0.6831, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.3450479233226837, |
|
"grad_norm": 1.0576103925704956, |
|
"learning_rate": 9.994965332706574e-06, |
|
"loss": 0.6833, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.3546325878594249, |
|
"grad_norm": 1.0163214206695557, |
|
"learning_rate": 9.992134075089085e-06, |
|
"loss": 0.6733, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.36421725239616615, |
|
"grad_norm": 0.9684771299362183, |
|
"learning_rate": 9.98867437523228e-06, |
|
"loss": 0.6817, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.3738019169329074, |
|
"grad_norm": 0.8340110778808594, |
|
"learning_rate": 9.984586668665641e-06, |
|
"loss": 0.6656, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.38338658146964855, |
|
"grad_norm": 1.0645322799682617, |
|
"learning_rate": 9.979871469976197e-06, |
|
"loss": 0.6639, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.3929712460063898, |
|
"grad_norm": 0.8854984045028687, |
|
"learning_rate": 9.974529372743762e-06, |
|
"loss": 0.6639, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.402555910543131, |
|
"grad_norm": 1.0209273099899292, |
|
"learning_rate": 9.968561049466214e-06, |
|
"loss": 0.6612, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.41214057507987223, |
|
"grad_norm": 0.9446033239364624, |
|
"learning_rate": 9.961967251474823e-06, |
|
"loss": 0.6525, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.4217252396166134, |
|
"grad_norm": 0.8962736129760742, |
|
"learning_rate": 9.954748808839675e-06, |
|
"loss": 0.6451, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.43130990415335463, |
|
"grad_norm": 0.8567093014717102, |
|
"learning_rate": 9.946906630265184e-06, |
|
"loss": 0.6302, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.44089456869009586, |
|
"grad_norm": 1.0150914192199707, |
|
"learning_rate": 9.938441702975689e-06, |
|
"loss": 0.6475, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.4504792332268371, |
|
"grad_norm": 0.8952351808547974, |
|
"learning_rate": 9.92935509259118e-06, |
|
"loss": 0.6582, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.46006389776357826, |
|
"grad_norm": 1.0809413194656372, |
|
"learning_rate": 9.91964794299315e-06, |
|
"loss": 0.6625, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.4696485623003195, |
|
"grad_norm": 0.9264349937438965, |
|
"learning_rate": 9.909321476180594e-06, |
|
"loss": 0.6461, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.4792332268370607, |
|
"grad_norm": 0.7585628628730774, |
|
"learning_rate": 9.898376992116179e-06, |
|
"loss": 0.6465, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.48881789137380194, |
|
"grad_norm": 1.0210459232330322, |
|
"learning_rate": 9.886815868562596e-06, |
|
"loss": 0.6519, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.4984025559105431, |
|
"grad_norm": 0.8413047194480896, |
|
"learning_rate": 9.874639560909118e-06, |
|
"loss": 0.6344, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.5079872204472844, |
|
"grad_norm": 0.848068118095398, |
|
"learning_rate": 9.861849601988384e-06, |
|
"loss": 0.6535, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.5175718849840255, |
|
"grad_norm": 0.8702071905136108, |
|
"learning_rate": 9.848447601883436e-06, |
|
"loss": 0.6366, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.5271565495207667, |
|
"grad_norm": 0.8316821455955505, |
|
"learning_rate": 9.834435247725032e-06, |
|
"loss": 0.6328, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.536741214057508, |
|
"grad_norm": 0.9288855791091919, |
|
"learning_rate": 9.819814303479268e-06, |
|
"loss": 0.6344, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.5463258785942492, |
|
"grad_norm": 0.8851747512817383, |
|
"learning_rate": 9.804586609725499e-06, |
|
"loss": 0.6497, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.5559105431309904, |
|
"grad_norm": 0.8732710480690002, |
|
"learning_rate": 9.788754083424654e-06, |
|
"loss": 0.6227, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.5654952076677316, |
|
"grad_norm": 0.8755611777305603, |
|
"learning_rate": 9.772318717677905e-06, |
|
"loss": 0.6345, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.5750798722044729, |
|
"grad_norm": 1.0519345998764038, |
|
"learning_rate": 9.755282581475769e-06, |
|
"loss": 0.631, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.5846645367412141, |
|
"grad_norm": 0.8587120175361633, |
|
"learning_rate": 9.737647819437645e-06, |
|
"loss": 0.6439, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.5942492012779552, |
|
"grad_norm": 0.8681675791740417, |
|
"learning_rate": 9.719416651541839e-06, |
|
"loss": 0.6523, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.6038338658146964, |
|
"grad_norm": 1.0971438884735107, |
|
"learning_rate": 9.700591372846096e-06, |
|
"loss": 0.6459, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.6134185303514377, |
|
"grad_norm": 0.8674156069755554, |
|
"learning_rate": 9.681174353198687e-06, |
|
"loss": 0.6153, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.6230031948881789, |
|
"grad_norm": 0.8687777519226074, |
|
"learning_rate": 9.661168036940071e-06, |
|
"loss": 0.6249, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.6325878594249201, |
|
"grad_norm": 0.9313914775848389, |
|
"learning_rate": 9.640574942595195e-06, |
|
"loss": 0.6307, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.6421725239616614, |
|
"grad_norm": 0.8277938961982727, |
|
"learning_rate": 9.619397662556434e-06, |
|
"loss": 0.6485, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.6517571884984026, |
|
"grad_norm": 0.7965261340141296, |
|
"learning_rate": 9.597638862757255e-06, |
|
"loss": 0.6015, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.6613418530351438, |
|
"grad_norm": 0.9038349986076355, |
|
"learning_rate": 9.5753012823366e-06, |
|
"loss": 0.618, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.670926517571885, |
|
"grad_norm": 0.9620576500892639, |
|
"learning_rate": 9.552387733294081e-06, |
|
"loss": 0.6182, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.6805111821086262, |
|
"grad_norm": 0.8715839385986328, |
|
"learning_rate": 9.528901100135971e-06, |
|
"loss": 0.6297, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.6900958466453674, |
|
"grad_norm": 0.8728576898574829, |
|
"learning_rate": 9.504844339512096e-06, |
|
"loss": 0.6409, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.6996805111821086, |
|
"grad_norm": 1.0132206678390503, |
|
"learning_rate": 9.480220479843627e-06, |
|
"loss": 0.6418, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.7092651757188498, |
|
"grad_norm": 0.905213475227356, |
|
"learning_rate": 9.45503262094184e-06, |
|
"loss": 0.6153, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.7188498402555911, |
|
"grad_norm": 0.7777428030967712, |
|
"learning_rate": 9.4292839336179e-06, |
|
"loss": 0.6039, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.7284345047923323, |
|
"grad_norm": 1.0897445678710938, |
|
"learning_rate": 9.40297765928369e-06, |
|
"loss": 0.6089, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.7380191693290735, |
|
"grad_norm": 0.9696489572525024, |
|
"learning_rate": 9.376117109543769e-06, |
|
"loss": 0.6156, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.7476038338658147, |
|
"grad_norm": 0.8268092274665833, |
|
"learning_rate": 9.348705665778479e-06, |
|
"loss": 0.6441, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.7571884984025559, |
|
"grad_norm": 1.245557427406311, |
|
"learning_rate": 9.320746778718274e-06, |
|
"loss": 0.6193, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.7667731629392971, |
|
"grad_norm": 0.8154701590538025, |
|
"learning_rate": 9.292243968009332e-06, |
|
"loss": 0.6334, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.7763578274760383, |
|
"grad_norm": 0.9788893461227417, |
|
"learning_rate": 9.263200821770462e-06, |
|
"loss": 0.6302, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.7859424920127795, |
|
"grad_norm": 0.9511937499046326, |
|
"learning_rate": 9.233620996141421e-06, |
|
"loss": 0.6146, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.7955271565495208, |
|
"grad_norm": 1.0094871520996094, |
|
"learning_rate": 9.203508214822652e-06, |
|
"loss": 0.6115, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.805111821086262, |
|
"grad_norm": 0.9043750762939453, |
|
"learning_rate": 9.172866268606514e-06, |
|
"loss": 0.621, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.8146964856230032, |
|
"grad_norm": 0.8562697172164917, |
|
"learning_rate": 9.141699014900084e-06, |
|
"loss": 0.6176, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.8242811501597445, |
|
"grad_norm": 0.8593828082084656, |
|
"learning_rate": 9.110010377239552e-06, |
|
"loss": 0.6378, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.8338658146964856, |
|
"grad_norm": 0.7726905941963196, |
|
"learning_rate": 9.077804344796302e-06, |
|
"loss": 0.6319, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.8434504792332268, |
|
"grad_norm": 0.8027164340019226, |
|
"learning_rate": 9.045084971874738e-06, |
|
"loss": 0.5939, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.853035143769968, |
|
"grad_norm": 0.798473596572876, |
|
"learning_rate": 9.011856377401891e-06, |
|
"loss": 0.5899, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.8626198083067093, |
|
"grad_norm": 0.8748455047607422, |
|
"learning_rate": 8.978122744408905e-06, |
|
"loss": 0.612, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.8722044728434505, |
|
"grad_norm": 0.7628821730613708, |
|
"learning_rate": 8.943888319504456e-06, |
|
"loss": 0.6234, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.8817891373801917, |
|
"grad_norm": 0.7844169735908508, |
|
"learning_rate": 8.90915741234015e-06, |
|
"loss": 0.6128, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.8913738019169329, |
|
"grad_norm": 0.8493337035179138, |
|
"learning_rate": 8.873934395068006e-06, |
|
"loss": 0.6141, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.9009584664536742, |
|
"grad_norm": 0.8148529529571533, |
|
"learning_rate": 8.838223701790057e-06, |
|
"loss": 0.5934, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.9105431309904153, |
|
"grad_norm": 0.7671308517456055, |
|
"learning_rate": 8.802029828000157e-06, |
|
"loss": 0.6037, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.9201277955271565, |
|
"grad_norm": 0.7300805449485779, |
|
"learning_rate": 8.765357330018056e-06, |
|
"loss": 0.5961, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.9297124600638977, |
|
"grad_norm": 0.7626590728759766, |
|
"learning_rate": 8.728210824415829e-06, |
|
"loss": 0.604, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.939297124600639, |
|
"grad_norm": 0.8798259496688843, |
|
"learning_rate": 8.690594987436705e-06, |
|
"loss": 0.6075, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.9488817891373802, |
|
"grad_norm": 0.9205968379974365, |
|
"learning_rate": 8.652514554406388e-06, |
|
"loss": 0.6112, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.9584664536741214, |
|
"grad_norm": 0.7867008447647095, |
|
"learning_rate": 8.613974319136959e-06, |
|
"loss": 0.5946, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.9680511182108626, |
|
"grad_norm": 0.9884403944015503, |
|
"learning_rate": 8.574979133323378e-06, |
|
"loss": 0.6149, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.9776357827476039, |
|
"grad_norm": 0.9288314580917358, |
|
"learning_rate": 8.535533905932739e-06, |
|
"loss": 0.6076, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.987220447284345, |
|
"grad_norm": 0.9012724757194519, |
|
"learning_rate": 8.495643602586287e-06, |
|
"loss": 0.5918, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.9968051118210862, |
|
"grad_norm": 0.8713186979293823, |
|
"learning_rate": 8.455313244934324e-06, |
|
"loss": 0.6128, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 1.0063897763578276, |
|
"grad_norm": 0.8621015548706055, |
|
"learning_rate": 8.414547910024035e-06, |
|
"loss": 0.5607, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 1.0159744408945688, |
|
"grad_norm": 0.766989529132843, |
|
"learning_rate": 8.373352729660373e-06, |
|
"loss": 0.5377, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 1.0255591054313098, |
|
"grad_norm": 0.8968957662582397, |
|
"learning_rate": 8.331732889760021e-06, |
|
"loss": 0.5361, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 1.035143769968051, |
|
"grad_norm": 0.7241206765174866, |
|
"learning_rate": 8.289693629698564e-06, |
|
"loss": 0.5248, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 1.0447284345047922, |
|
"grad_norm": 0.8252156376838684, |
|
"learning_rate": 8.247240241650918e-06, |
|
"loss": 0.5699, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 1.0543130990415335, |
|
"grad_norm": 0.7180123329162598, |
|
"learning_rate": 8.204378069925121e-06, |
|
"loss": 0.5612, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 1.0638977635782747, |
|
"grad_norm": 0.7721811532974243, |
|
"learning_rate": 8.16111251028955e-06, |
|
"loss": 0.5623, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 1.073482428115016, |
|
"grad_norm": 0.7802078723907471, |
|
"learning_rate": 8.117449009293668e-06, |
|
"loss": 0.5223, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 1.0830670926517572, |
|
"grad_norm": 0.8273614048957825, |
|
"learning_rate": 8.073393063582386e-06, |
|
"loss": 0.5623, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 1.0926517571884984, |
|
"grad_norm": 0.7058457136154175, |
|
"learning_rate": 8.0289502192041e-06, |
|
"loss": 0.5531, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 1.1022364217252396, |
|
"grad_norm": 0.8331887722015381, |
|
"learning_rate": 7.984126070912519e-06, |
|
"loss": 0.5482, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 1.1118210862619808, |
|
"grad_norm": 0.7870607376098633, |
|
"learning_rate": 7.938926261462366e-06, |
|
"loss": 0.5439, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 1.121405750798722, |
|
"grad_norm": 0.7735496759414673, |
|
"learning_rate": 7.89335648089903e-06, |
|
"loss": 0.5357, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 1.1309904153354633, |
|
"grad_norm": 0.8142541646957397, |
|
"learning_rate": 7.84742246584226e-06, |
|
"loss": 0.5701, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 1.1405750798722045, |
|
"grad_norm": 0.7338546514511108, |
|
"learning_rate": 7.801129998764014e-06, |
|
"loss": 0.5478, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 1.1501597444089458, |
|
"grad_norm": 0.7157395482063293, |
|
"learning_rate": 7.754484907260513e-06, |
|
"loss": 0.5677, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 1.159744408945687, |
|
"grad_norm": 0.8188211917877197, |
|
"learning_rate": 7.70749306331863e-06, |
|
"loss": 0.5379, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 1.1693290734824282, |
|
"grad_norm": 0.7177704572677612, |
|
"learning_rate": 7.660160382576683e-06, |
|
"loss": 0.5382, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 1.1789137380191694, |
|
"grad_norm": 0.7849264144897461, |
|
"learning_rate": 7.612492823579744e-06, |
|
"loss": 0.5503, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 1.1884984025559104, |
|
"grad_norm": 0.8151108026504517, |
|
"learning_rate": 7.564496387029532e-06, |
|
"loss": 0.5486, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 1.1980830670926517, |
|
"grad_norm": 0.8086329102516174, |
|
"learning_rate": 7.516177115029002e-06, |
|
"loss": 0.5354, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 1.207667731629393, |
|
"grad_norm": 0.6399027705192566, |
|
"learning_rate": 7.467541090321735e-06, |
|
"loss": 0.5435, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 1.2172523961661341, |
|
"grad_norm": 0.8837975859642029, |
|
"learning_rate": 7.4185944355261996e-06, |
|
"loss": 0.5491, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 1.2268370607028753, |
|
"grad_norm": 0.6761747002601624, |
|
"learning_rate": 7.369343312364994e-06, |
|
"loss": 0.5496, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 1.2364217252396166, |
|
"grad_norm": 0.8938049077987671, |
|
"learning_rate": 7.319793920889171e-06, |
|
"loss": 0.5724, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 1.2460063897763578, |
|
"grad_norm": 0.6806639432907104, |
|
"learning_rate": 7.269952498697734e-06, |
|
"loss": 0.5535, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.255591054313099, |
|
"grad_norm": 0.6811041831970215, |
|
"learning_rate": 7.219825320152411e-06, |
|
"loss": 0.5419, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 1.2651757188498403, |
|
"grad_norm": 0.7163997292518616, |
|
"learning_rate": 7.169418695587791e-06, |
|
"loss": 0.5529, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 1.2747603833865815, |
|
"grad_norm": 0.7321376204490662, |
|
"learning_rate": 7.118738970516944e-06, |
|
"loss": 0.526, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 1.2843450479233227, |
|
"grad_norm": 0.699560284614563, |
|
"learning_rate": 7.067792524832604e-06, |
|
"loss": 0.5239, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 1.293929712460064, |
|
"grad_norm": 0.6956992149353027, |
|
"learning_rate": 7.016585772004026e-06, |
|
"loss": 0.5487, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 1.3035143769968052, |
|
"grad_norm": 0.684252917766571, |
|
"learning_rate": 6.965125158269619e-06, |
|
"loss": 0.5307, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 1.3130990415335464, |
|
"grad_norm": 0.7382246255874634, |
|
"learning_rate": 6.913417161825449e-06, |
|
"loss": 0.5339, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 1.3226837060702876, |
|
"grad_norm": 0.693885862827301, |
|
"learning_rate": 6.8614682920097265e-06, |
|
"loss": 0.548, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 1.3322683706070286, |
|
"grad_norm": 0.6941449642181396, |
|
"learning_rate": 6.809285088483361e-06, |
|
"loss": 0.5611, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 1.34185303514377, |
|
"grad_norm": 0.7960633635520935, |
|
"learning_rate": 6.7568741204067145e-06, |
|
"loss": 0.5452, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.351437699680511, |
|
"grad_norm": 0.6940522193908691, |
|
"learning_rate": 6.704241985612625e-06, |
|
"loss": 0.5427, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 1.3610223642172525, |
|
"grad_norm": 0.6586253643035889, |
|
"learning_rate": 6.651395309775837e-06, |
|
"loss": 0.5172, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 1.3706070287539935, |
|
"grad_norm": 0.6910136342048645, |
|
"learning_rate": 6.598340745578908e-06, |
|
"loss": 0.5403, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 1.3801916932907348, |
|
"grad_norm": 0.7065456509590149, |
|
"learning_rate": 6.545084971874738e-06, |
|
"loss": 0.5493, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 1.389776357827476, |
|
"grad_norm": 0.7206940054893494, |
|
"learning_rate": 6.491634692845781e-06, |
|
"loss": 0.5576, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 1.3993610223642172, |
|
"grad_norm": 0.6870983839035034, |
|
"learning_rate": 6.437996637160086e-06, |
|
"loss": 0.539, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 1.4089456869009584, |
|
"grad_norm": 0.7133631706237793, |
|
"learning_rate": 6.384177557124247e-06, |
|
"loss": 0.5378, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 1.4185303514376997, |
|
"grad_norm": 0.7012754082679749, |
|
"learning_rate": 6.330184227833376e-06, |
|
"loss": 0.5227, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.428115015974441, |
|
"grad_norm": 0.7168461680412292, |
|
"learning_rate": 6.276023446318214e-06, |
|
"loss": 0.5287, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 1.4376996805111821, |
|
"grad_norm": 0.7431371212005615, |
|
"learning_rate": 6.2217020306894705e-06, |
|
"loss": 0.5746, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.4472843450479234, |
|
"grad_norm": 0.8183725476264954, |
|
"learning_rate": 6.1672268192795285e-06, |
|
"loss": 0.5471, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 1.4568690095846646, |
|
"grad_norm": 0.7414305806159973, |
|
"learning_rate": 6.112604669781572e-06, |
|
"loss": 0.5305, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.4664536741214058, |
|
"grad_norm": 0.7249237298965454, |
|
"learning_rate": 6.057842458386315e-06, |
|
"loss": 0.539, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 1.476038338658147, |
|
"grad_norm": 0.7848438620567322, |
|
"learning_rate": 6.002947078916365e-06, |
|
"loss": 0.522, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 1.4856230031948883, |
|
"grad_norm": 0.6666212677955627, |
|
"learning_rate": 5.947925441958393e-06, |
|
"loss": 0.5366, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 1.4952076677316293, |
|
"grad_norm": 0.8270851969718933, |
|
"learning_rate": 5.892784473993184e-06, |
|
"loss": 0.5529, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 1.5047923322683707, |
|
"grad_norm": 0.7611240148544312, |
|
"learning_rate": 5.837531116523683e-06, |
|
"loss": 0.5395, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 1.5143769968051117, |
|
"grad_norm": 0.691460371017456, |
|
"learning_rate": 5.782172325201155e-06, |
|
"loss": 0.5308, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 1.5239616613418532, |
|
"grad_norm": 0.7503504157066345, |
|
"learning_rate": 5.726715068949564e-06, |
|
"loss": 0.5589, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 1.5335463258785942, |
|
"grad_norm": 0.7102341651916504, |
|
"learning_rate": 5.671166329088278e-06, |
|
"loss": 0.5587, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 1.5431309904153354, |
|
"grad_norm": 0.7421011328697205, |
|
"learning_rate": 5.615533098453215e-06, |
|
"loss": 0.5543, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 1.5527156549520766, |
|
"grad_norm": 0.7281792163848877, |
|
"learning_rate": 5.559822380516539e-06, |
|
"loss": 0.5305, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 1.5623003194888179, |
|
"grad_norm": 0.6894298791885376, |
|
"learning_rate": 5.504041188505022e-06, |
|
"loss": 0.5215, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 1.571884984025559, |
|
"grad_norm": 0.67361980676651, |
|
"learning_rate": 5.448196544517168e-06, |
|
"loss": 0.5459, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 1.5814696485623003, |
|
"grad_norm": 0.6694360375404358, |
|
"learning_rate": 5.392295478639226e-06, |
|
"loss": 0.5323, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 1.5910543130990416, |
|
"grad_norm": 0.7042282223701477, |
|
"learning_rate": 5.336345028060199e-06, |
|
"loss": 0.5259, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 1.6006389776357828, |
|
"grad_norm": 0.6842959523200989, |
|
"learning_rate": 5.2803522361859596e-06, |
|
"loss": 0.5551, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 1.610223642172524, |
|
"grad_norm": 0.7328696250915527, |
|
"learning_rate": 5.224324151752575e-06, |
|
"loss": 0.5459, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 1.619808306709265, |
|
"grad_norm": 0.6389267444610596, |
|
"learning_rate": 5.168267827938971e-06, |
|
"loss": 0.5276, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 1.6293929712460065, |
|
"grad_norm": 0.7053073644638062, |
|
"learning_rate": 5.112190321479026e-06, |
|
"loss": 0.5485, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 1.6389776357827475, |
|
"grad_norm": 0.7645400166511536, |
|
"learning_rate": 5.05609869177323e-06, |
|
"loss": 0.524, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 1.648562300319489, |
|
"grad_norm": 0.717299222946167, |
|
"learning_rate": 5e-06, |
|
"loss": 0.5217, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 1.65814696485623, |
|
"grad_norm": 0.6739091277122498, |
|
"learning_rate": 4.943901308226771e-06, |
|
"loss": 0.5256, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 1.6677316293929714, |
|
"grad_norm": 0.6906968951225281, |
|
"learning_rate": 4.887809678520976e-06, |
|
"loss": 0.5392, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 1.6773162939297124, |
|
"grad_norm": 0.6453084349632263, |
|
"learning_rate": 4.831732172061032e-06, |
|
"loss": 0.5464, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 1.6869009584664538, |
|
"grad_norm": 0.6224555969238281, |
|
"learning_rate": 4.775675848247427e-06, |
|
"loss": 0.5596, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 1.6964856230031948, |
|
"grad_norm": 0.7328355312347412, |
|
"learning_rate": 4.719647763814041e-06, |
|
"loss": 0.5299, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 1.706070287539936, |
|
"grad_norm": 0.6948109865188599, |
|
"learning_rate": 4.663654971939802e-06, |
|
"loss": 0.5504, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 1.7156549520766773, |
|
"grad_norm": 0.6178452968597412, |
|
"learning_rate": 4.6077045213607765e-06, |
|
"loss": 0.5312, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 1.7252396166134185, |
|
"grad_norm": 0.651917040348053, |
|
"learning_rate": 4.551803455482833e-06, |
|
"loss": 0.5372, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 1.7348242811501597, |
|
"grad_norm": 0.672956645488739, |
|
"learning_rate": 4.4959588114949785e-06, |
|
"loss": 0.5285, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 1.744408945686901, |
|
"grad_norm": 0.6844072937965393, |
|
"learning_rate": 4.4401776194834615e-06, |
|
"loss": 0.5409, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 1.7539936102236422, |
|
"grad_norm": 0.6573711037635803, |
|
"learning_rate": 4.384466901546786e-06, |
|
"loss": 0.556, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 1.7635782747603834, |
|
"grad_norm": 0.6577045917510986, |
|
"learning_rate": 4.3288336709117246e-06, |
|
"loss": 0.5417, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 1.7731629392971247, |
|
"grad_norm": 0.6672411561012268, |
|
"learning_rate": 4.273284931050438e-06, |
|
"loss": 0.5399, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 1.7827476038338657, |
|
"grad_norm": 0.6693191528320312, |
|
"learning_rate": 4.217827674798845e-06, |
|
"loss": 0.5355, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 1.792332268370607, |
|
"grad_norm": 0.6733079552650452, |
|
"learning_rate": 4.162468883476319e-06, |
|
"loss": 0.5187, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 1.8019169329073481, |
|
"grad_norm": 0.7173909544944763, |
|
"learning_rate": 4.107215526006818e-06, |
|
"loss": 0.5334, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 1.8115015974440896, |
|
"grad_norm": 0.709580659866333, |
|
"learning_rate": 4.052074558041608e-06, |
|
"loss": 0.5396, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 1.8210862619808306, |
|
"grad_norm": 0.6447334289550781, |
|
"learning_rate": 3.997052921083637e-06, |
|
"loss": 0.5317, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 1.830670926517572, |
|
"grad_norm": 0.6373368501663208, |
|
"learning_rate": 3.9421575416136866e-06, |
|
"loss": 0.5388, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 1.840255591054313, |
|
"grad_norm": 0.672348141670227, |
|
"learning_rate": 3.887395330218429e-06, |
|
"loss": 0.524, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 1.8498402555910545, |
|
"grad_norm": 0.6385909914970398, |
|
"learning_rate": 3.832773180720475e-06, |
|
"loss": 0.5326, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 1.8594249201277955, |
|
"grad_norm": 0.6423490047454834, |
|
"learning_rate": 3.778297969310529e-06, |
|
"loss": 0.5274, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 1.8690095846645367, |
|
"grad_norm": 0.6420339345932007, |
|
"learning_rate": 3.723976553681787e-06, |
|
"loss": 0.5634, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 1.878594249201278, |
|
"grad_norm": 0.752887487411499, |
|
"learning_rate": 3.669815772166625e-06, |
|
"loss": 0.5351, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 1.8881789137380192, |
|
"grad_norm": 0.6227625012397766, |
|
"learning_rate": 3.6158224428757538e-06, |
|
"loss": 0.5253, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 1.8977635782747604, |
|
"grad_norm": 0.5916818976402283, |
|
"learning_rate": 3.562003362839914e-06, |
|
"loss": 0.5327, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 1.9073482428115016, |
|
"grad_norm": 0.742470920085907, |
|
"learning_rate": 3.50836530715422e-06, |
|
"loss": 0.5583, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 1.9169329073482428, |
|
"grad_norm": 0.6018843650817871, |
|
"learning_rate": 3.4549150281252635e-06, |
|
"loss": 0.5513, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 1.926517571884984, |
|
"grad_norm": 0.6124539375305176, |
|
"learning_rate": 3.4016592544210937e-06, |
|
"loss": 0.543, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 1.9361022364217253, |
|
"grad_norm": 0.6403389573097229, |
|
"learning_rate": 3.3486046902241663e-06, |
|
"loss": 0.531, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 1.9456869009584663, |
|
"grad_norm": 0.6133631467819214, |
|
"learning_rate": 3.295758014387375e-06, |
|
"loss": 0.5051, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 1.9552715654952078, |
|
"grad_norm": 0.6370496153831482, |
|
"learning_rate": 3.2431258795932863e-06, |
|
"loss": 0.5461, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 1.9648562300319488, |
|
"grad_norm": 0.6143704056739807, |
|
"learning_rate": 3.1907149115166403e-06, |
|
"loss": 0.5572, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 1.9744408945686902, |
|
"grad_norm": 0.6722372770309448, |
|
"learning_rate": 3.1385317079902743e-06, |
|
"loss": 0.5204, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 1.9840255591054312, |
|
"grad_norm": 0.6286212801933289, |
|
"learning_rate": 3.0865828381745515e-06, |
|
"loss": 0.5419, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 1.9936102236421727, |
|
"grad_norm": 0.5995206236839294, |
|
"learning_rate": 3.0348748417303826e-06, |
|
"loss": 0.5394, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 2.0031948881789137, |
|
"grad_norm": 0.6550461053848267, |
|
"learning_rate": 2.9834142279959754e-06, |
|
"loss": 0.5119, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 2.012779552715655, |
|
"grad_norm": 0.6472194194793701, |
|
"learning_rate": 2.932207475167398e-06, |
|
"loss": 0.4875, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 2.022364217252396, |
|
"grad_norm": 0.6644472479820251, |
|
"learning_rate": 2.8812610294830568e-06, |
|
"loss": 0.4528, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 2.0319488817891376, |
|
"grad_norm": 0.5704013705253601, |
|
"learning_rate": 2.83058130441221e-06, |
|
"loss": 0.4957, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 2.0415335463258786, |
|
"grad_norm": 0.6226819157600403, |
|
"learning_rate": 2.7801746798475905e-06, |
|
"loss": 0.4965, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 2.0511182108626196, |
|
"grad_norm": 0.6059399843215942, |
|
"learning_rate": 2.7300475013022666e-06, |
|
"loss": 0.4986, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 2.060702875399361, |
|
"grad_norm": 0.6234830617904663, |
|
"learning_rate": 2.6802060791108304e-06, |
|
"loss": 0.4885, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 2.070287539936102, |
|
"grad_norm": 0.6117391586303711, |
|
"learning_rate": 2.6306566876350072e-06, |
|
"loss": 0.4728, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 2.0798722044728435, |
|
"grad_norm": 0.682826578617096, |
|
"learning_rate": 2.5814055644738013e-06, |
|
"loss": 0.4764, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 2.0894568690095845, |
|
"grad_norm": 0.6085416078567505, |
|
"learning_rate": 2.532458909678266e-06, |
|
"loss": 0.4823, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 2.099041533546326, |
|
"grad_norm": 0.6411639451980591, |
|
"learning_rate": 2.483822884971e-06, |
|
"loss": 0.5015, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 2.108626198083067, |
|
"grad_norm": 0.6138768196105957, |
|
"learning_rate": 2.43550361297047e-06, |
|
"loss": 0.4832, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 2.1182108626198084, |
|
"grad_norm": 0.6133522987365723, |
|
"learning_rate": 2.387507176420256e-06, |
|
"loss": 0.4701, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 2.1277955271565494, |
|
"grad_norm": 0.594542920589447, |
|
"learning_rate": 2.339839617423318e-06, |
|
"loss": 0.4765, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 2.137380191693291, |
|
"grad_norm": 0.6451693773269653, |
|
"learning_rate": 2.2925069366813718e-06, |
|
"loss": 0.4787, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 2.146964856230032, |
|
"grad_norm": 0.6343329548835754, |
|
"learning_rate": 2.245515092739488e-06, |
|
"loss": 0.4832, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 2.1565495207667733, |
|
"grad_norm": 0.6135368347167969, |
|
"learning_rate": 2.1988700012359865e-06, |
|
"loss": 0.4771, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 2.1661341853035143, |
|
"grad_norm": 0.5679577589035034, |
|
"learning_rate": 2.1525775341577404e-06, |
|
"loss": 0.4973, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 2.1757188498402558, |
|
"grad_norm": 0.5575957298278809, |
|
"learning_rate": 2.1066435191009717e-06, |
|
"loss": 0.4811, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 2.1853035143769968, |
|
"grad_norm": 0.5644421577453613, |
|
"learning_rate": 2.061073738537635e-06, |
|
"loss": 0.4737, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 2.194888178913738, |
|
"grad_norm": 0.5842664837837219, |
|
"learning_rate": 2.0158739290874822e-06, |
|
"loss": 0.5029, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 2.2044728434504792, |
|
"grad_norm": 0.5931283831596375, |
|
"learning_rate": 1.971049780795901e-06, |
|
"loss": 0.4768, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 2.2140575079872207, |
|
"grad_norm": 0.6379035115242004, |
|
"learning_rate": 1.9266069364176144e-06, |
|
"loss": 0.4822, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 2.2236421725239617, |
|
"grad_norm": 0.6156995892524719, |
|
"learning_rate": 1.8825509907063328e-06, |
|
"loss": 0.4703, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 2.2332268370607027, |
|
"grad_norm": 0.5634399056434631, |
|
"learning_rate": 1.838887489710452e-06, |
|
"loss": 0.4892, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 2.242811501597444, |
|
"grad_norm": 0.5468457341194153, |
|
"learning_rate": 1.7956219300748796e-06, |
|
"loss": 0.4735, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 2.252396166134185, |
|
"grad_norm": 0.566130518913269, |
|
"learning_rate": 1.7527597583490825e-06, |
|
"loss": 0.4898, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 2.2619808306709266, |
|
"grad_norm": 0.6444130539894104, |
|
"learning_rate": 1.7103063703014372e-06, |
|
"loss": 0.4848, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 2.2715654952076676, |
|
"grad_norm": 0.5839499235153198, |
|
"learning_rate": 1.6682671102399806e-06, |
|
"loss": 0.4904, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 2.281150159744409, |
|
"grad_norm": 0.6181128025054932, |
|
"learning_rate": 1.6266472703396286e-06, |
|
"loss": 0.499, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 2.29073482428115, |
|
"grad_norm": 0.5924781560897827, |
|
"learning_rate": 1.5854520899759656e-06, |
|
"loss": 0.4669, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 2.3003194888178915, |
|
"grad_norm": 0.548241376876831, |
|
"learning_rate": 1.544686755065677e-06, |
|
"loss": 0.4737, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 2.3099041533546325, |
|
"grad_norm": 0.5470595955848694, |
|
"learning_rate": 1.5043563974137132e-06, |
|
"loss": 0.4714, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 2.319488817891374, |
|
"grad_norm": 0.5618712902069092, |
|
"learning_rate": 1.4644660940672628e-06, |
|
"loss": 0.4788, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 2.329073482428115, |
|
"grad_norm": 0.6169053912162781, |
|
"learning_rate": 1.4250208666766235e-06, |
|
"loss": 0.4886, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 2.3386581469648564, |
|
"grad_norm": 0.5996852517127991, |
|
"learning_rate": 1.3860256808630429e-06, |
|
"loss": 0.4687, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 2.3482428115015974, |
|
"grad_norm": 0.5412963032722473, |
|
"learning_rate": 1.3474854455936126e-06, |
|
"loss": 0.474, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 2.357827476038339, |
|
"grad_norm": 0.5670292377471924, |
|
"learning_rate": 1.3094050125632973e-06, |
|
"loss": 0.4903, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 2.36741214057508, |
|
"grad_norm": 0.5631871223449707, |
|
"learning_rate": 1.2717891755841722e-06, |
|
"loss": 0.4833, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 2.376996805111821, |
|
"grad_norm": 0.5678842663764954, |
|
"learning_rate": 1.234642669981946e-06, |
|
"loss": 0.4588, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 2.3865814696485623, |
|
"grad_norm": 0.5656471252441406, |
|
"learning_rate": 1.1979701719998454e-06, |
|
"loss": 0.4725, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 2.3961661341853033, |
|
"grad_norm": 0.5302391648292542, |
|
"learning_rate": 1.1617762982099446e-06, |
|
"loss": 0.4874, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 2.405750798722045, |
|
"grad_norm": 0.5217997431755066, |
|
"learning_rate": 1.1260656049319957e-06, |
|
"loss": 0.4856, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 2.415335463258786, |
|
"grad_norm": 0.5714566707611084, |
|
"learning_rate": 1.0908425876598512e-06, |
|
"loss": 0.4609, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 2.4249201277955272, |
|
"grad_norm": 0.5429855585098267, |
|
"learning_rate": 1.0561116804955451e-06, |
|
"loss": 0.5047, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 2.4345047923322682, |
|
"grad_norm": 0.5583947896957397, |
|
"learning_rate": 1.0218772555910955e-06, |
|
"loss": 0.4884, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 2.4440894568690097, |
|
"grad_norm": 0.5764051675796509, |
|
"learning_rate": 9.881436225981107e-07, |
|
"loss": 0.4758, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 2.4536741214057507, |
|
"grad_norm": 0.5524209141731262, |
|
"learning_rate": 9.549150281252633e-07, |
|
"loss": 0.5023, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 2.463258785942492, |
|
"grad_norm": 0.5488308668136597, |
|
"learning_rate": 9.221956552036992e-07, |
|
"loss": 0.4789, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 2.472843450479233, |
|
"grad_norm": 0.5414860248565674, |
|
"learning_rate": 8.899896227604509e-07, |
|
"loss": 0.4823, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 2.4824281150159746, |
|
"grad_norm": 0.5666077136993408, |
|
"learning_rate": 8.58300985099918e-07, |
|
"loss": 0.4615, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 2.4920127795527156, |
|
"grad_norm": 0.5838495492935181, |
|
"learning_rate": 8.271337313934869e-07, |
|
"loss": 0.464, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 2.501597444089457, |
|
"grad_norm": 0.5656548738479614, |
|
"learning_rate": 7.964917851773496e-07, |
|
"loss": 0.4767, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 2.511182108626198, |
|
"grad_norm": 0.5187501907348633, |
|
"learning_rate": 7.663790038585794e-07, |
|
"loss": 0.4777, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 2.520766773162939, |
|
"grad_norm": 0.5355112552642822, |
|
"learning_rate": 7.367991782295392e-07, |
|
"loss": 0.4855, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 2.5303514376996805, |
|
"grad_norm": 0.5446317791938782, |
|
"learning_rate": 7.077560319906696e-07, |
|
"loss": 0.4786, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 2.539936102236422, |
|
"grad_norm": 0.5624379515647888, |
|
"learning_rate": 6.792532212817271e-07, |
|
"loss": 0.4815, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 2.549520766773163, |
|
"grad_norm": 0.5310940146446228, |
|
"learning_rate": 6.512943342215234e-07, |
|
"loss": 0.4778, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 2.559105431309904, |
|
"grad_norm": 0.5407336950302124, |
|
"learning_rate": 6.238828904562316e-07, |
|
"loss": 0.4506, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 2.5686900958466454, |
|
"grad_norm": 0.5669533014297485, |
|
"learning_rate": 5.9702234071631e-07, |
|
"loss": 0.481, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 2.5782747603833864, |
|
"grad_norm": 0.5309340357780457, |
|
"learning_rate": 5.707160663821009e-07, |
|
"loss": 0.4851, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 2.587859424920128, |
|
"grad_norm": 0.5235077738761902, |
|
"learning_rate": 5.449673790581611e-07, |
|
"loss": 0.4478, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 2.597444089456869, |
|
"grad_norm": 0.552655041217804, |
|
"learning_rate": 5.197795201563744e-07, |
|
"loss": 0.4464, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 2.6070287539936103, |
|
"grad_norm": 0.5500283241271973, |
|
"learning_rate": 4.951556604879049e-07, |
|
"loss": 0.489, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 2.6166134185303513, |
|
"grad_norm": 0.5286134481430054, |
|
"learning_rate": 4.710988998640298e-07, |
|
"loss": 0.4739, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 2.626198083067093, |
|
"grad_norm": 0.4994671046733856, |
|
"learning_rate": 4.4761226670592074e-07, |
|
"loss": 0.5058, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 2.635782747603834, |
|
"grad_norm": 0.534156858921051, |
|
"learning_rate": 4.2469871766340096e-07, |
|
"loss": 0.4907, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 2.6453674121405752, |
|
"grad_norm": 0.5577386021614075, |
|
"learning_rate": 4.0236113724274716e-07, |
|
"loss": 0.4506, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 2.6549520766773163, |
|
"grad_norm": 0.513982892036438, |
|
"learning_rate": 3.8060233744356634e-07, |
|
"loss": 0.4662, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 2.6645367412140573, |
|
"grad_norm": 0.5294740796089172, |
|
"learning_rate": 3.5942505740480583e-07, |
|
"loss": 0.4634, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 2.6741214057507987, |
|
"grad_norm": 0.5422140955924988, |
|
"learning_rate": 3.3883196305992906e-07, |
|
"loss": 0.4673, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 2.68370607028754, |
|
"grad_norm": 0.5204587578773499, |
|
"learning_rate": 3.18825646801314e-07, |
|
"loss": 0.4651, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 2.693290734824281, |
|
"grad_norm": 0.5269270539283752, |
|
"learning_rate": 2.9940862715390483e-07, |
|
"loss": 0.4912, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 2.702875399361022, |
|
"grad_norm": 0.5300841331481934, |
|
"learning_rate": 2.8058334845816214e-07, |
|
"loss": 0.4722, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 2.7124600638977636, |
|
"grad_norm": 0.5122048854827881, |
|
"learning_rate": 2.6235218056235633e-07, |
|
"loss": 0.469, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 2.722044728434505, |
|
"grad_norm": 0.560728907585144, |
|
"learning_rate": 2.447174185242324e-07, |
|
"loss": 0.4698, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 2.731629392971246, |
|
"grad_norm": 0.5124772787094116, |
|
"learning_rate": 2.276812823220964e-07, |
|
"loss": 0.4446, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 2.741214057507987, |
|
"grad_norm": 0.5050658583641052, |
|
"learning_rate": 2.1124591657534776e-07, |
|
"loss": 0.4885, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 2.7507987220447285, |
|
"grad_norm": 0.5437944531440735, |
|
"learning_rate": 1.9541339027450256e-07, |
|
"loss": 0.4689, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 2.7603833865814695, |
|
"grad_norm": 0.4845607876777649, |
|
"learning_rate": 1.801856965207338e-07, |
|
"loss": 0.4963, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 2.769968051118211, |
|
"grad_norm": 0.5433393716812134, |
|
"learning_rate": 1.6556475227496816e-07, |
|
"loss": 0.4848, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 2.779552715654952, |
|
"grad_norm": 0.5294089317321777, |
|
"learning_rate": 1.5155239811656562e-07, |
|
"loss": 0.4549, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 2.7891373801916934, |
|
"grad_norm": 0.46811607480049133, |
|
"learning_rate": 1.3815039801161723e-07, |
|
"loss": 0.4949, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 2.7987220447284344, |
|
"grad_norm": 0.5526321530342102, |
|
"learning_rate": 1.253604390908819e-07, |
|
"loss": 0.4926, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 2.8083067092651754, |
|
"grad_norm": 0.4953249394893646, |
|
"learning_rate": 1.1318413143740436e-07, |
|
"loss": 0.5013, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 2.817891373801917, |
|
"grad_norm": 0.5125919580459595, |
|
"learning_rate": 1.0162300788382263e-07, |
|
"loss": 0.5025, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 2.8274760383386583, |
|
"grad_norm": 0.5203016996383667, |
|
"learning_rate": 9.0678523819408e-08, |
|
"loss": 0.4653, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 2.8370607028753994, |
|
"grad_norm": 0.5107254981994629, |
|
"learning_rate": 8.035205700685167e-08, |
|
"loss": 0.4868, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 2.8466453674121404, |
|
"grad_norm": 0.5385187268257141, |
|
"learning_rate": 7.064490740882057e-08, |
|
"loss": 0.4773, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 2.856230031948882, |
|
"grad_norm": 0.5249802470207214, |
|
"learning_rate": 6.15582970243117e-08, |
|
"loss": 0.4753, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 2.8658146964856233, |
|
"grad_norm": 0.5248013734817505, |
|
"learning_rate": 5.3093369734816824e-08, |
|
"loss": 0.4778, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 2.8753993610223643, |
|
"grad_norm": 0.4982949495315552, |
|
"learning_rate": 4.52511911603265e-08, |
|
"loss": 0.4856, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 2.8849840255591053, |
|
"grad_norm": 0.5062425136566162, |
|
"learning_rate": 3.8032748525179684e-08, |
|
"loss": 0.4636, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 2.8945686900958467, |
|
"grad_norm": 0.524002194404602, |
|
"learning_rate": 3.143895053378698e-08, |
|
"loss": 0.4833, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 2.9041533546325877, |
|
"grad_norm": 0.5100042223930359, |
|
"learning_rate": 2.547062725623828e-08, |
|
"loss": 0.4598, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 2.913738019169329, |
|
"grad_norm": 0.498949259519577, |
|
"learning_rate": 2.012853002380466e-08, |
|
"loss": 0.4842, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 2.92332268370607, |
|
"grad_norm": 0.5397353172302246, |
|
"learning_rate": 1.541333133436018e-08, |
|
"loss": 0.4549, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 2.9329073482428116, |
|
"grad_norm": 0.5431365966796875, |
|
"learning_rate": 1.132562476771959e-08, |
|
"loss": 0.4633, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 2.9424920127795526, |
|
"grad_norm": 0.5180041790008545, |
|
"learning_rate": 7.865924910916977e-09, |
|
"loss": 0.4627, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 2.952076677316294, |
|
"grad_norm": 0.5015384554862976, |
|
"learning_rate": 5.034667293427053e-09, |
|
"loss": 0.5014, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 2.961661341853035, |
|
"grad_norm": 0.5198219418525696, |
|
"learning_rate": 2.8322083323334417e-09, |
|
"loss": 0.4546, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 2.9712460063897765, |
|
"grad_norm": 0.5227149128913879, |
|
"learning_rate": 1.2588252874673469e-09, |
|
"loss": 0.4814, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 2.9808306709265175, |
|
"grad_norm": 0.518096387386322, |
|
"learning_rate": 3.147162264971471e-10, |
|
"loss": 0.4832, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 2.9904153354632586, |
|
"grad_norm": 0.5191978812217712, |
|
"learning_rate": 0.0, |
|
"loss": 0.4861, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 2.9904153354632586, |
|
"step": 312, |
|
"total_flos": 2.172427208694825e+17, |
|
"train_loss": 0.0, |
|
"train_runtime": 6.2928, |
|
"train_samples_per_second": 4767.384, |
|
"train_steps_per_second": 49.581 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 312, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2.172427208694825e+17, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |