{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.9977989728539987,
"eval_steps": 500,
"global_step": 1362,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0022010271460014674,
"grad_norm": 8.602499008178711,
"learning_rate": 7.299270072992701e-08,
"loss": 1.3433,
"step": 1
},
{
"epoch": 0.004402054292002935,
"grad_norm": 8.88303279876709,
"learning_rate": 1.4598540145985402e-07,
"loss": 1.3346,
"step": 2
},
{
"epoch": 0.006603081438004402,
"grad_norm": 8.714223861694336,
"learning_rate": 2.1897810218978106e-07,
"loss": 1.3313,
"step": 3
},
{
"epoch": 0.00880410858400587,
"grad_norm": 8.289698600769043,
"learning_rate": 2.9197080291970804e-07,
"loss": 1.34,
"step": 4
},
{
"epoch": 0.011005135730007337,
"grad_norm": 8.334420204162598,
"learning_rate": 3.6496350364963505e-07,
"loss": 1.2892,
"step": 5
},
{
"epoch": 0.013206162876008804,
"grad_norm": 8.704950332641602,
"learning_rate": 4.379562043795621e-07,
"loss": 1.3271,
"step": 6
},
{
"epoch": 0.015407190022010272,
"grad_norm": 8.579828262329102,
"learning_rate": 5.109489051094891e-07,
"loss": 1.3116,
"step": 7
},
{
"epoch": 0.01760821716801174,
"grad_norm": 8.08187484741211,
"learning_rate": 5.839416058394161e-07,
"loss": 1.2446,
"step": 8
},
{
"epoch": 0.019809244314013204,
"grad_norm": 8.048093795776367,
"learning_rate": 6.569343065693432e-07,
"loss": 1.2933,
"step": 9
},
{
"epoch": 0.022010271460014674,
"grad_norm": 7.852508068084717,
"learning_rate": 7.299270072992701e-07,
"loss": 1.2825,
"step": 10
},
{
"epoch": 0.02421129860601614,
"grad_norm": 8.012701034545898,
"learning_rate": 8.029197080291971e-07,
"loss": 1.3107,
"step": 11
},
{
"epoch": 0.02641232575201761,
"grad_norm": 6.399667739868164,
"learning_rate": 8.759124087591242e-07,
"loss": 1.2443,
"step": 12
},
{
"epoch": 0.028613352898019074,
"grad_norm": 6.580313682556152,
"learning_rate": 9.489051094890511e-07,
"loss": 1.2632,
"step": 13
},
{
"epoch": 0.030814380044020543,
"grad_norm": 6.206697940826416,
"learning_rate": 1.0218978102189781e-06,
"loss": 1.2009,
"step": 14
},
{
"epoch": 0.03301540719002201,
"grad_norm": 5.858867645263672,
"learning_rate": 1.0948905109489052e-06,
"loss": 1.2096,
"step": 15
},
{
"epoch": 0.03521643433602348,
"grad_norm": 4.870204925537109,
"learning_rate": 1.1678832116788322e-06,
"loss": 1.1838,
"step": 16
},
{
"epoch": 0.03741746148202495,
"grad_norm": 3.4847540855407715,
"learning_rate": 1.2408759124087592e-06,
"loss": 1.177,
"step": 17
},
{
"epoch": 0.03961848862802641,
"grad_norm": 3.2664291858673096,
"learning_rate": 1.3138686131386864e-06,
"loss": 1.1786,
"step": 18
},
{
"epoch": 0.04181951577402788,
"grad_norm": 3.3974335193634033,
"learning_rate": 1.3868613138686132e-06,
"loss": 1.1348,
"step": 19
},
{
"epoch": 0.04402054292002935,
"grad_norm": 2.9466233253479004,
"learning_rate": 1.4598540145985402e-06,
"loss": 1.1846,
"step": 20
},
{
"epoch": 0.046221570066030816,
"grad_norm": 2.748234272003174,
"learning_rate": 1.5328467153284674e-06,
"loss": 1.1144,
"step": 21
},
{
"epoch": 0.04842259721203228,
"grad_norm": 2.33402681350708,
"learning_rate": 1.6058394160583942e-06,
"loss": 1.1223,
"step": 22
},
{
"epoch": 0.05062362435803375,
"grad_norm": 2.93180775642395,
"learning_rate": 1.6788321167883212e-06,
"loss": 1.0878,
"step": 23
},
{
"epoch": 0.05282465150403522,
"grad_norm": 3.3263399600982666,
"learning_rate": 1.7518248175182485e-06,
"loss": 1.0738,
"step": 24
},
{
"epoch": 0.055025678650036686,
"grad_norm": 3.226666212081909,
"learning_rate": 1.8248175182481753e-06,
"loss": 1.0859,
"step": 25
},
{
"epoch": 0.05722670579603815,
"grad_norm": 3.244706869125366,
"learning_rate": 1.8978102189781023e-06,
"loss": 1.0906,
"step": 26
},
{
"epoch": 0.05942773294203962,
"grad_norm": 2.7051339149475098,
"learning_rate": 1.9708029197080293e-06,
"loss": 1.0463,
"step": 27
},
{
"epoch": 0.061628760088041086,
"grad_norm": 2.543907403945923,
"learning_rate": 2.0437956204379563e-06,
"loss": 1.0312,
"step": 28
},
{
"epoch": 0.06382978723404255,
"grad_norm": 2.179581880569458,
"learning_rate": 2.1167883211678833e-06,
"loss": 1.0529,
"step": 29
},
{
"epoch": 0.06603081438004402,
"grad_norm": 1.6721938848495483,
"learning_rate": 2.1897810218978103e-06,
"loss": 1.006,
"step": 30
},
{
"epoch": 0.06823184152604549,
"grad_norm": 1.4780975580215454,
"learning_rate": 2.2627737226277373e-06,
"loss": 1.0161,
"step": 31
},
{
"epoch": 0.07043286867204696,
"grad_norm": 1.5052958726882935,
"learning_rate": 2.3357664233576643e-06,
"loss": 1.0106,
"step": 32
},
{
"epoch": 0.07263389581804842,
"grad_norm": 1.7043911218643188,
"learning_rate": 2.4087591240875918e-06,
"loss": 1.0005,
"step": 33
},
{
"epoch": 0.0748349229640499,
"grad_norm": 1.4996953010559082,
"learning_rate": 2.4817518248175183e-06,
"loss": 0.9829,
"step": 34
},
{
"epoch": 0.07703595011005136,
"grad_norm": 1.5174620151519775,
"learning_rate": 2.5547445255474458e-06,
"loss": 0.9406,
"step": 35
},
{
"epoch": 0.07923697725605282,
"grad_norm": 1.261339783668518,
"learning_rate": 2.627737226277373e-06,
"loss": 1.0115,
"step": 36
},
{
"epoch": 0.08143800440205429,
"grad_norm": 1.258482575416565,
"learning_rate": 2.7007299270072994e-06,
"loss": 0.9908,
"step": 37
},
{
"epoch": 0.08363903154805576,
"grad_norm": 1.163866639137268,
"learning_rate": 2.7737226277372264e-06,
"loss": 0.9271,
"step": 38
},
{
"epoch": 0.08584005869405723,
"grad_norm": 1.0042675733566284,
"learning_rate": 2.8467153284671534e-06,
"loss": 0.9711,
"step": 39
},
{
"epoch": 0.0880410858400587,
"grad_norm": 1.0205066204071045,
"learning_rate": 2.9197080291970804e-06,
"loss": 0.9417,
"step": 40
},
{
"epoch": 0.09024211298606016,
"grad_norm": 1.0749284029006958,
"learning_rate": 2.992700729927008e-06,
"loss": 0.9328,
"step": 41
},
{
"epoch": 0.09244314013206163,
"grad_norm": 1.1877307891845703,
"learning_rate": 3.065693430656935e-06,
"loss": 0.9649,
"step": 42
},
{
"epoch": 0.0946441672780631,
"grad_norm": 1.132910132408142,
"learning_rate": 3.1386861313868614e-06,
"loss": 0.9235,
"step": 43
},
{
"epoch": 0.09684519442406456,
"grad_norm": 1.174088478088379,
"learning_rate": 3.2116788321167884e-06,
"loss": 0.9217,
"step": 44
},
{
"epoch": 0.09904622157006603,
"grad_norm": 0.9998783469200134,
"learning_rate": 3.2846715328467155e-06,
"loss": 0.9597,
"step": 45
},
{
"epoch": 0.1012472487160675,
"grad_norm": 0.9148156046867371,
"learning_rate": 3.3576642335766425e-06,
"loss": 0.8903,
"step": 46
},
{
"epoch": 0.10344827586206896,
"grad_norm": 0.9994775652885437,
"learning_rate": 3.43065693430657e-06,
"loss": 0.9253,
"step": 47
},
{
"epoch": 0.10564930300807043,
"grad_norm": 1.0471197366714478,
"learning_rate": 3.503649635036497e-06,
"loss": 0.9305,
"step": 48
},
{
"epoch": 0.1078503301540719,
"grad_norm": 0.9179279804229736,
"learning_rate": 3.576642335766424e-06,
"loss": 0.9159,
"step": 49
},
{
"epoch": 0.11005135730007337,
"grad_norm": 0.9869850277900696,
"learning_rate": 3.6496350364963505e-06,
"loss": 0.9364,
"step": 50
},
{
"epoch": 0.11225238444607484,
"grad_norm": 0.9455310106277466,
"learning_rate": 3.7226277372262775e-06,
"loss": 0.8962,
"step": 51
},
{
"epoch": 0.1144534115920763,
"grad_norm": 0.9848787784576416,
"learning_rate": 3.7956204379562045e-06,
"loss": 0.9262,
"step": 52
},
{
"epoch": 0.11665443873807776,
"grad_norm": 0.978649914264679,
"learning_rate": 3.868613138686132e-06,
"loss": 0.892,
"step": 53
},
{
"epoch": 0.11885546588407923,
"grad_norm": 0.961056649684906,
"learning_rate": 3.9416058394160585e-06,
"loss": 0.9186,
"step": 54
},
{
"epoch": 0.1210564930300807,
"grad_norm": 0.905271589756012,
"learning_rate": 4.014598540145986e-06,
"loss": 0.906,
"step": 55
},
{
"epoch": 0.12325752017608217,
"grad_norm": 0.9186722040176392,
"learning_rate": 4.0875912408759126e-06,
"loss": 0.8883,
"step": 56
},
{
"epoch": 0.12545854732208364,
"grad_norm": 1.0104196071624756,
"learning_rate": 4.16058394160584e-06,
"loss": 0.8991,
"step": 57
},
{
"epoch": 0.1276595744680851,
"grad_norm": 0.9091898202896118,
"learning_rate": 4.233576642335767e-06,
"loss": 0.9089,
"step": 58
},
{
"epoch": 0.12986060161408658,
"grad_norm": 0.8388140201568604,
"learning_rate": 4.306569343065693e-06,
"loss": 0.9012,
"step": 59
},
{
"epoch": 0.13206162876008803,
"grad_norm": 0.9056335091590881,
"learning_rate": 4.379562043795621e-06,
"loss": 0.9002,
"step": 60
},
{
"epoch": 0.13426265590608952,
"grad_norm": 0.9177324771881104,
"learning_rate": 4.452554744525548e-06,
"loss": 0.9246,
"step": 61
},
{
"epoch": 0.13646368305209097,
"grad_norm": 0.9186745882034302,
"learning_rate": 4.525547445255475e-06,
"loss": 0.8762,
"step": 62
},
{
"epoch": 0.13866471019809246,
"grad_norm": 0.8796854019165039,
"learning_rate": 4.598540145985402e-06,
"loss": 0.8759,
"step": 63
},
{
"epoch": 0.1408657373440939,
"grad_norm": 0.838026225566864,
"learning_rate": 4.671532846715329e-06,
"loss": 0.8813,
"step": 64
},
{
"epoch": 0.14306676449009537,
"grad_norm": 0.8501123189926147,
"learning_rate": 4.744525547445255e-06,
"loss": 0.9045,
"step": 65
},
{
"epoch": 0.14526779163609685,
"grad_norm": 0.8895454406738281,
"learning_rate": 4.8175182481751835e-06,
"loss": 0.8773,
"step": 66
},
{
"epoch": 0.1474688187820983,
"grad_norm": 0.8545335531234741,
"learning_rate": 4.89051094890511e-06,
"loss": 0.8702,
"step": 67
},
{
"epoch": 0.1496698459280998,
"grad_norm": 0.8764218091964722,
"learning_rate": 4.963503649635037e-06,
"loss": 0.8952,
"step": 68
},
{
"epoch": 0.15187087307410124,
"grad_norm": 0.8692036867141724,
"learning_rate": 5.036496350364964e-06,
"loss": 0.8997,
"step": 69
},
{
"epoch": 0.15407190022010273,
"grad_norm": 0.8339161276817322,
"learning_rate": 5.1094890510948916e-06,
"loss": 0.8878,
"step": 70
},
{
"epoch": 0.15627292736610418,
"grad_norm": 0.9395849704742432,
"learning_rate": 5.182481751824818e-06,
"loss": 0.8988,
"step": 71
},
{
"epoch": 0.15847395451210564,
"grad_norm": 0.8380614519119263,
"learning_rate": 5.255474452554746e-06,
"loss": 0.878,
"step": 72
},
{
"epoch": 0.16067498165810712,
"grad_norm": 0.8262082934379578,
"learning_rate": 5.328467153284672e-06,
"loss": 0.8676,
"step": 73
},
{
"epoch": 0.16287600880410857,
"grad_norm": 0.7947503328323364,
"learning_rate": 5.401459854014599e-06,
"loss": 0.8288,
"step": 74
},
{
"epoch": 0.16507703595011006,
"grad_norm": 0.8966465592384338,
"learning_rate": 5.474452554744526e-06,
"loss": 0.854,
"step": 75
},
{
"epoch": 0.1672780630961115,
"grad_norm": 0.862066388130188,
"learning_rate": 5.547445255474453e-06,
"loss": 0.8591,
"step": 76
},
{
"epoch": 0.169479090242113,
"grad_norm": 0.8597044348716736,
"learning_rate": 5.62043795620438e-06,
"loss": 0.875,
"step": 77
},
{
"epoch": 0.17168011738811445,
"grad_norm": 0.8399585485458374,
"learning_rate": 5.693430656934307e-06,
"loss": 0.8766,
"step": 78
},
{
"epoch": 0.17388114453411593,
"grad_norm": 0.8056613802909851,
"learning_rate": 5.766423357664233e-06,
"loss": 0.8544,
"step": 79
},
{
"epoch": 0.1760821716801174,
"grad_norm": 0.9118307828903198,
"learning_rate": 5.839416058394161e-06,
"loss": 0.8482,
"step": 80
},
{
"epoch": 0.17828319882611884,
"grad_norm": 0.8373892307281494,
"learning_rate": 5.912408759124088e-06,
"loss": 0.8444,
"step": 81
},
{
"epoch": 0.18048422597212033,
"grad_norm": 0.8192471265792847,
"learning_rate": 5.985401459854016e-06,
"loss": 0.8457,
"step": 82
},
{
"epoch": 0.18268525311812178,
"grad_norm": 0.8045415282249451,
"learning_rate": 6.058394160583942e-06,
"loss": 0.856,
"step": 83
},
{
"epoch": 0.18488628026412326,
"grad_norm": 0.8543170690536499,
"learning_rate": 6.13138686131387e-06,
"loss": 0.8715,
"step": 84
},
{
"epoch": 0.18708730741012472,
"grad_norm": 0.9579630494117737,
"learning_rate": 6.204379562043796e-06,
"loss": 0.8515,
"step": 85
},
{
"epoch": 0.1892883345561262,
"grad_norm": 0.8797301650047302,
"learning_rate": 6.277372262773723e-06,
"loss": 0.8614,
"step": 86
},
{
"epoch": 0.19148936170212766,
"grad_norm": 0.8576250672340393,
"learning_rate": 6.35036496350365e-06,
"loss": 0.8666,
"step": 87
},
{
"epoch": 0.1936903888481291,
"grad_norm": 0.9103819727897644,
"learning_rate": 6.423357664233577e-06,
"loss": 0.831,
"step": 88
},
{
"epoch": 0.1958914159941306,
"grad_norm": 0.9499757885932922,
"learning_rate": 6.496350364963504e-06,
"loss": 0.8816,
"step": 89
},
{
"epoch": 0.19809244314013205,
"grad_norm": 0.9489899277687073,
"learning_rate": 6.569343065693431e-06,
"loss": 0.8717,
"step": 90
},
{
"epoch": 0.20029347028613353,
"grad_norm": 0.8146291971206665,
"learning_rate": 6.6423357664233575e-06,
"loss": 0.8714,
"step": 91
},
{
"epoch": 0.202494497432135,
"grad_norm": 0.8218434453010559,
"learning_rate": 6.715328467153285e-06,
"loss": 0.8157,
"step": 92
},
{
"epoch": 0.20469552457813647,
"grad_norm": 0.8900724649429321,
"learning_rate": 6.7883211678832115e-06,
"loss": 0.849,
"step": 93
},
{
"epoch": 0.20689655172413793,
"grad_norm": 0.8431738018989563,
"learning_rate": 6.86131386861314e-06,
"loss": 0.8604,
"step": 94
},
{
"epoch": 0.2090975788701394,
"grad_norm": 0.8448619842529297,
"learning_rate": 6.934306569343066e-06,
"loss": 0.8682,
"step": 95
},
{
"epoch": 0.21129860601614087,
"grad_norm": 0.8271082043647766,
"learning_rate": 7.007299270072994e-06,
"loss": 0.843,
"step": 96
},
{
"epoch": 0.21349963316214232,
"grad_norm": 0.8270028829574585,
"learning_rate": 7.08029197080292e-06,
"loss": 0.8571,
"step": 97
},
{
"epoch": 0.2157006603081438,
"grad_norm": 0.8812565803527832,
"learning_rate": 7.153284671532848e-06,
"loss": 0.8335,
"step": 98
},
{
"epoch": 0.21790168745414526,
"grad_norm": 0.9207035303115845,
"learning_rate": 7.2262773722627744e-06,
"loss": 0.8766,
"step": 99
},
{
"epoch": 0.22010271460014674,
"grad_norm": 0.8485338687896729,
"learning_rate": 7.299270072992701e-06,
"loss": 0.8622,
"step": 100
},
{
"epoch": 0.2223037417461482,
"grad_norm": 0.8020341992378235,
"learning_rate": 7.3722627737226285e-06,
"loss": 0.8568,
"step": 101
},
{
"epoch": 0.22450476889214968,
"grad_norm": 0.9067462086677551,
"learning_rate": 7.445255474452555e-06,
"loss": 0.8199,
"step": 102
},
{
"epoch": 0.22670579603815114,
"grad_norm": 0.9271589517593384,
"learning_rate": 7.5182481751824825e-06,
"loss": 0.8704,
"step": 103
},
{
"epoch": 0.2289068231841526,
"grad_norm": 0.8645519614219666,
"learning_rate": 7.591240875912409e-06,
"loss": 0.8658,
"step": 104
},
{
"epoch": 0.23110785033015407,
"grad_norm": 0.7933644652366638,
"learning_rate": 7.664233576642336e-06,
"loss": 0.8081,
"step": 105
},
{
"epoch": 0.23330887747615553,
"grad_norm": 0.9333701729774475,
"learning_rate": 7.737226277372264e-06,
"loss": 0.812,
"step": 106
},
{
"epoch": 0.235509904622157,
"grad_norm": 0.9106976985931396,
"learning_rate": 7.810218978102191e-06,
"loss": 0.8577,
"step": 107
},
{
"epoch": 0.23771093176815847,
"grad_norm": 0.8138116598129272,
"learning_rate": 7.883211678832117e-06,
"loss": 0.8563,
"step": 108
},
{
"epoch": 0.23991195891415995,
"grad_norm": 0.8626731634140015,
"learning_rate": 7.956204379562045e-06,
"loss": 0.8241,
"step": 109
},
{
"epoch": 0.2421129860601614,
"grad_norm": 0.8971178531646729,
"learning_rate": 8.029197080291972e-06,
"loss": 0.8559,
"step": 110
},
{
"epoch": 0.2443140132061629,
"grad_norm": 0.8788782358169556,
"learning_rate": 8.1021897810219e-06,
"loss": 0.852,
"step": 111
},
{
"epoch": 0.24651504035216434,
"grad_norm": 0.8196245431900024,
"learning_rate": 8.175182481751825e-06,
"loss": 0.849,
"step": 112
},
{
"epoch": 0.2487160674981658,
"grad_norm": 0.8820701241493225,
"learning_rate": 8.248175182481753e-06,
"loss": 0.8547,
"step": 113
},
{
"epoch": 0.2509170946441673,
"grad_norm": 0.9854983687400818,
"learning_rate": 8.32116788321168e-06,
"loss": 0.8485,
"step": 114
},
{
"epoch": 0.25311812179016874,
"grad_norm": 0.8719347715377808,
"learning_rate": 8.394160583941606e-06,
"loss": 0.8288,
"step": 115
},
{
"epoch": 0.2553191489361702,
"grad_norm": 0.8920370936393738,
"learning_rate": 8.467153284671533e-06,
"loss": 0.8217,
"step": 116
},
{
"epoch": 0.2575201760821717,
"grad_norm": 0.8688334822654724,
"learning_rate": 8.54014598540146e-06,
"loss": 0.8449,
"step": 117
},
{
"epoch": 0.25972120322817316,
"grad_norm": 1.0517594814300537,
"learning_rate": 8.613138686131386e-06,
"loss": 0.8211,
"step": 118
},
{
"epoch": 0.2619222303741746,
"grad_norm": 0.989764928817749,
"learning_rate": 8.686131386861315e-06,
"loss": 0.8506,
"step": 119
},
{
"epoch": 0.26412325752017607,
"grad_norm": 0.8339976668357849,
"learning_rate": 8.759124087591241e-06,
"loss": 0.8597,
"step": 120
},
{
"epoch": 0.2663242846661775,
"grad_norm": 0.9739260077476501,
"learning_rate": 8.832116788321169e-06,
"loss": 0.8336,
"step": 121
},
{
"epoch": 0.26852531181217903,
"grad_norm": 0.9132515788078308,
"learning_rate": 8.905109489051096e-06,
"loss": 0.8589,
"step": 122
},
{
"epoch": 0.2707263389581805,
"grad_norm": 0.8860148787498474,
"learning_rate": 8.978102189781024e-06,
"loss": 0.8022,
"step": 123
},
{
"epoch": 0.27292736610418195,
"grad_norm": 0.9572710990905762,
"learning_rate": 9.05109489051095e-06,
"loss": 0.8554,
"step": 124
},
{
"epoch": 0.2751283932501834,
"grad_norm": 0.8367565274238586,
"learning_rate": 9.124087591240877e-06,
"loss": 0.8417,
"step": 125
},
{
"epoch": 0.2773294203961849,
"grad_norm": 0.9046987891197205,
"learning_rate": 9.197080291970804e-06,
"loss": 0.8255,
"step": 126
},
{
"epoch": 0.27953044754218637,
"grad_norm": 0.9586616158485413,
"learning_rate": 9.27007299270073e-06,
"loss": 0.8014,
"step": 127
},
{
"epoch": 0.2817314746881878,
"grad_norm": 0.9137142300605774,
"learning_rate": 9.343065693430657e-06,
"loss": 0.8454,
"step": 128
},
{
"epoch": 0.2839325018341893,
"grad_norm": 0.9334844350814819,
"learning_rate": 9.416058394160585e-06,
"loss": 0.8393,
"step": 129
},
{
"epoch": 0.28613352898019073,
"grad_norm": 0.9317649006843567,
"learning_rate": 9.48905109489051e-06,
"loss": 0.8257,
"step": 130
},
{
"epoch": 0.28833455612619224,
"grad_norm": 0.886470377445221,
"learning_rate": 9.56204379562044e-06,
"loss": 0.8424,
"step": 131
},
{
"epoch": 0.2905355832721937,
"grad_norm": 0.8501319289207458,
"learning_rate": 9.635036496350367e-06,
"loss": 0.8475,
"step": 132
},
{
"epoch": 0.29273661041819515,
"grad_norm": 0.8071812987327576,
"learning_rate": 9.708029197080293e-06,
"loss": 0.8286,
"step": 133
},
{
"epoch": 0.2949376375641966,
"grad_norm": 0.9328346252441406,
"learning_rate": 9.78102189781022e-06,
"loss": 0.855,
"step": 134
},
{
"epoch": 0.2971386647101981,
"grad_norm": 0.8955098390579224,
"learning_rate": 9.854014598540148e-06,
"loss": 0.8521,
"step": 135
},
{
"epoch": 0.2993396918561996,
"grad_norm": 0.8671036958694458,
"learning_rate": 9.927007299270073e-06,
"loss": 0.8382,
"step": 136
},
{
"epoch": 0.30154071900220103,
"grad_norm": 0.9117414355278015,
"learning_rate": 1e-05,
"loss": 0.8263,
"step": 137
},
{
"epoch": 0.3037417461482025,
"grad_norm": 0.9594736099243164,
"learning_rate": 9.999983557519382e-06,
"loss": 0.868,
"step": 138
},
{
"epoch": 0.30594277329420394,
"grad_norm": 1.0000696182250977,
"learning_rate": 9.999934230185666e-06,
"loss": 0.8524,
"step": 139
},
{
"epoch": 0.30814380044020545,
"grad_norm": 0.8921096920967102,
"learning_rate": 9.99985201832328e-06,
"loss": 0.8304,
"step": 140
},
{
"epoch": 0.3103448275862069,
"grad_norm": 0.8602084517478943,
"learning_rate": 9.999736922472927e-06,
"loss": 0.8327,
"step": 141
},
{
"epoch": 0.31254585473220836,
"grad_norm": 1.058423638343811,
"learning_rate": 9.999588943391597e-06,
"loss": 0.8136,
"step": 142
},
{
"epoch": 0.3147468818782098,
"grad_norm": 0.8639562129974365,
"learning_rate": 9.999408082052544e-06,
"loss": 0.8273,
"step": 143
},
{
"epoch": 0.31694790902421127,
"grad_norm": 0.9425264596939087,
"learning_rate": 9.999194339645292e-06,
"loss": 0.8157,
"step": 144
},
{
"epoch": 0.3191489361702128,
"grad_norm": 0.8584508895874023,
"learning_rate": 9.998947717575624e-06,
"loss": 0.8144,
"step": 145
},
{
"epoch": 0.32134996331621424,
"grad_norm": 0.9385148882865906,
"learning_rate": 9.998668217465569e-06,
"loss": 0.8336,
"step": 146
},
{
"epoch": 0.3235509904622157,
"grad_norm": 0.9032395482063293,
"learning_rate": 9.9983558411534e-06,
"loss": 0.8473,
"step": 147
},
{
"epoch": 0.32575201760821715,
"grad_norm": 0.9225577116012573,
"learning_rate": 9.998010590693612e-06,
"loss": 0.8564,
"step": 148
},
{
"epoch": 0.32795304475421866,
"grad_norm": 0.9421638250350952,
"learning_rate": 9.997632468356915e-06,
"loss": 0.8374,
"step": 149
},
{
"epoch": 0.3301540719002201,
"grad_norm": 0.9442781209945679,
"learning_rate": 9.997221476630217e-06,
"loss": 0.8531,
"step": 150
},
{
"epoch": 0.33235509904622157,
"grad_norm": 1.0274507999420166,
"learning_rate": 9.996777618216608e-06,
"loss": 0.8166,
"step": 151
},
{
"epoch": 0.334556126192223,
"grad_norm": 0.9189411401748657,
"learning_rate": 9.99630089603534e-06,
"loss": 0.8332,
"step": 152
},
{
"epoch": 0.3367571533382245,
"grad_norm": 0.9770063757896423,
"learning_rate": 9.99579131322181e-06,
"loss": 0.8475,
"step": 153
},
{
"epoch": 0.338958180484226,
"grad_norm": 1.0098830461502075,
"learning_rate": 9.995248873127544e-06,
"loss": 0.804,
"step": 154
},
{
"epoch": 0.34115920763022745,
"grad_norm": 0.8306410312652588,
"learning_rate": 9.994673579320162e-06,
"loss": 0.8113,
"step": 155
},
{
"epoch": 0.3433602347762289,
"grad_norm": 0.8494657874107361,
"learning_rate": 9.994065435583368e-06,
"loss": 0.8116,
"step": 156
},
{
"epoch": 0.34556126192223036,
"grad_norm": 0.887860894203186,
"learning_rate": 9.993424445916923e-06,
"loss": 0.8321,
"step": 157
},
{
"epoch": 0.34776228906823187,
"grad_norm": 0.9540331363677979,
"learning_rate": 9.992750614536606e-06,
"loss": 0.8663,
"step": 158
},
{
"epoch": 0.3499633162142333,
"grad_norm": 0.9319275617599487,
"learning_rate": 9.9920439458742e-06,
"loss": 0.8226,
"step": 159
},
{
"epoch": 0.3521643433602348,
"grad_norm": 0.9410102367401123,
"learning_rate": 9.991304444577465e-06,
"loss": 0.8576,
"step": 160
},
{
"epoch": 0.35436537050623623,
"grad_norm": 0.9312860369682312,
"learning_rate": 9.990532115510093e-06,
"loss": 0.7982,
"step": 161
},
{
"epoch": 0.3565663976522377,
"grad_norm": 0.9462873935699463,
"learning_rate": 9.989726963751683e-06,
"loss": 0.8351,
"step": 162
},
{
"epoch": 0.3587674247982392,
"grad_norm": 1.0383718013763428,
"learning_rate": 9.988888994597714e-06,
"loss": 0.8318,
"step": 163
},
{
"epoch": 0.36096845194424065,
"grad_norm": 0.9235098361968994,
"learning_rate": 9.988018213559504e-06,
"loss": 0.8422,
"step": 164
},
{
"epoch": 0.3631694790902421,
"grad_norm": 1.0266047716140747,
"learning_rate": 9.987114626364172e-06,
"loss": 0.8119,
"step": 165
},
{
"epoch": 0.36537050623624356,
"grad_norm": 1.0650782585144043,
"learning_rate": 9.986178238954602e-06,
"loss": 0.8245,
"step": 166
},
{
"epoch": 0.3675715333822451,
"grad_norm": 0.8566809892654419,
"learning_rate": 9.98520905748941e-06,
"loss": 0.8301,
"step": 167
},
{
"epoch": 0.36977256052824653,
"grad_norm": 0.8719614744186401,
"learning_rate": 9.984207088342895e-06,
"loss": 0.8642,
"step": 168
},
{
"epoch": 0.371973587674248,
"grad_norm": 0.936769962310791,
"learning_rate": 9.983172338104996e-06,
"loss": 0.8047,
"step": 169
},
{
"epoch": 0.37417461482024944,
"grad_norm": 0.9317916631698608,
"learning_rate": 9.982104813581263e-06,
"loss": 0.8324,
"step": 170
},
{
"epoch": 0.3763756419662509,
"grad_norm": 0.9052245616912842,
"learning_rate": 9.981004521792793e-06,
"loss": 0.8267,
"step": 171
},
{
"epoch": 0.3785766691122524,
"grad_norm": 0.9102479219436646,
"learning_rate": 9.979871469976197e-06,
"loss": 0.8418,
"step": 172
},
{
"epoch": 0.38077769625825386,
"grad_norm": 0.8687317371368408,
"learning_rate": 9.978705665583548e-06,
"loss": 0.8411,
"step": 173
},
{
"epoch": 0.3829787234042553,
"grad_norm": 0.8432865142822266,
"learning_rate": 9.977507116282333e-06,
"loss": 0.814,
"step": 174
},
{
"epoch": 0.38517975055025677,
"grad_norm": 0.8426979184150696,
"learning_rate": 9.9762758299554e-06,
"loss": 0.8189,
"step": 175
},
{
"epoch": 0.3873807776962582,
"grad_norm": 0.7806556224822998,
"learning_rate": 9.975011814700912e-06,
"loss": 0.8234,
"step": 176
},
{
"epoch": 0.38958180484225974,
"grad_norm": 0.847745954990387,
"learning_rate": 9.973715078832288e-06,
"loss": 0.8322,
"step": 177
},
{
"epoch": 0.3917828319882612,
"grad_norm": 0.9873587489128113,
"learning_rate": 9.972385630878147e-06,
"loss": 0.8466,
"step": 178
},
{
"epoch": 0.39398385913426265,
"grad_norm": 0.8093173503875732,
"learning_rate": 9.971023479582258e-06,
"loss": 0.8243,
"step": 179
},
{
"epoch": 0.3961848862802641,
"grad_norm": 0.955855667591095,
"learning_rate": 9.969628633903483e-06,
"loss": 0.8351,
"step": 180
},
{
"epoch": 0.3983859134262656,
"grad_norm": 0.8876407742500305,
"learning_rate": 9.968201103015707e-06,
"loss": 0.8286,
"step": 181
},
{
"epoch": 0.40058694057226707,
"grad_norm": 0.849768877029419,
"learning_rate": 9.966740896307791e-06,
"loss": 0.8041,
"step": 182
},
{
"epoch": 0.4027879677182685,
"grad_norm": 1.0292125940322876,
"learning_rate": 9.965248023383505e-06,
"loss": 0.8367,
"step": 183
},
{
"epoch": 0.40498899486427,
"grad_norm": 0.9178354144096375,
"learning_rate": 9.96372249406146e-06,
"loss": 0.8346,
"step": 184
},
{
"epoch": 0.40719002201027144,
"grad_norm": 0.9529702067375183,
"learning_rate": 9.962164318375052e-06,
"loss": 0.8391,
"step": 185
},
{
"epoch": 0.40939104915627295,
"grad_norm": 0.9264957308769226,
"learning_rate": 9.960573506572391e-06,
"loss": 0.8139,
"step": 186
},
{
"epoch": 0.4115920763022744,
"grad_norm": 0.8662728071212769,
"learning_rate": 9.95895006911623e-06,
"loss": 0.8009,
"step": 187
},
{
"epoch": 0.41379310344827586,
"grad_norm": 0.9041555523872375,
"learning_rate": 9.957294016683912e-06,
"loss": 0.7845,
"step": 188
},
{
"epoch": 0.4159941305942773,
"grad_norm": 0.8520869016647339,
"learning_rate": 9.955605360167275e-06,
"loss": 0.8311,
"step": 189
},
{
"epoch": 0.4181951577402788,
"grad_norm": 0.8427354097366333,
"learning_rate": 9.9538841106726e-06,
"loss": 0.7831,
"step": 190
},
{
"epoch": 0.4203961848862803,
"grad_norm": 0.936897873878479,
"learning_rate": 9.952130279520535e-06,
"loss": 0.8065,
"step": 191
},
{
"epoch": 0.42259721203228173,
"grad_norm": 0.814750611782074,
"learning_rate": 9.950343878246011e-06,
"loss": 0.8259,
"step": 192
},
{
"epoch": 0.4247982391782832,
"grad_norm": 0.9034767746925354,
"learning_rate": 9.948524918598175e-06,
"loss": 0.7994,
"step": 193
},
{
"epoch": 0.42699926632428464,
"grad_norm": 0.8683601021766663,
"learning_rate": 9.946673412540313e-06,
"loss": 0.8158,
"step": 194
},
{
"epoch": 0.42920029347028615,
"grad_norm": 0.8487507700920105,
"learning_rate": 9.944789372249765e-06,
"loss": 0.7797,
"step": 195
},
{
"epoch": 0.4314013206162876,
"grad_norm": 0.947300910949707,
"learning_rate": 9.94287281011785e-06,
"loss": 0.8293,
"step": 196
},
{
"epoch": 0.43360234776228906,
"grad_norm": 0.9057540893554688,
"learning_rate": 9.94092373874978e-06,
"loss": 0.8264,
"step": 197
},
{
"epoch": 0.4358033749082905,
"grad_norm": 0.872458279132843,
"learning_rate": 9.938942170964583e-06,
"loss": 0.8275,
"step": 198
},
{
"epoch": 0.43800440205429203,
"grad_norm": 0.7907083034515381,
"learning_rate": 9.936928119795017e-06,
"loss": 0.8274,
"step": 199
},
{
"epoch": 0.4402054292002935,
"grad_norm": 0.9132557511329651,
"learning_rate": 9.934881598487478e-06,
"loss": 0.8406,
"step": 200
},
{
"epoch": 0.44240645634629494,
"grad_norm": 0.892181932926178,
"learning_rate": 9.932802620501925e-06,
"loss": 0.7853,
"step": 201
},
{
"epoch": 0.4446074834922964,
"grad_norm": 0.7941415905952454,
"learning_rate": 9.930691199511775e-06,
"loss": 0.8137,
"step": 202
},
{
"epoch": 0.44680851063829785,
"grad_norm": 0.9250680208206177,
"learning_rate": 9.928547349403832e-06,
"loss": 0.857,
"step": 203
},
{
"epoch": 0.44900953778429936,
"grad_norm": 0.888518750667572,
"learning_rate": 9.926371084278178e-06,
"loss": 0.8322,
"step": 204
},
{
"epoch": 0.4512105649303008,
"grad_norm": 0.8937363624572754,
"learning_rate": 9.924162418448093e-06,
"loss": 0.8546,
"step": 205
},
{
"epoch": 0.45341159207630227,
"grad_norm": 0.9054156541824341,
"learning_rate": 9.921921366439958e-06,
"loss": 0.8083,
"step": 206
},
{
"epoch": 0.4556126192223037,
"grad_norm": 0.9927157163619995,
"learning_rate": 9.91964794299315e-06,
"loss": 0.8082,
"step": 207
},
{
"epoch": 0.4578136463683052,
"grad_norm": 0.8348643183708191,
"learning_rate": 9.917342163059959e-06,
"loss": 0.8434,
"step": 208
},
{
"epoch": 0.4600146735143067,
"grad_norm": 0.8938760161399841,
"learning_rate": 9.915004041805482e-06,
"loss": 0.8288,
"step": 209
},
{
"epoch": 0.46221570066030815,
"grad_norm": 0.9009197950363159,
"learning_rate": 9.912633594607526e-06,
"loss": 0.7791,
"step": 210
},
{
"epoch": 0.4644167278063096,
"grad_norm": 0.934923529624939,
"learning_rate": 9.910230837056501e-06,
"loss": 0.8302,
"step": 211
},
{
"epoch": 0.46661775495231106,
"grad_norm": 0.8404641151428223,
"learning_rate": 9.907795784955327e-06,
"loss": 0.8297,
"step": 212
},
{
"epoch": 0.46881878209831257,
"grad_norm": 0.9019914269447327,
"learning_rate": 9.905328454319323e-06,
"loss": 0.7986,
"step": 213
},
{
"epoch": 0.471019809244314,
"grad_norm": 1.0523722171783447,
"learning_rate": 9.902828861376101e-06,
"loss": 0.8001,
"step": 214
},
{
"epoch": 0.4732208363903155,
"grad_norm": 0.9488732218742371,
"learning_rate": 9.900297022565467e-06,
"loss": 0.8247,
"step": 215
},
{
"epoch": 0.47542186353631694,
"grad_norm": 0.9922806620597839,
"learning_rate": 9.897732954539303e-06,
"loss": 0.8148,
"step": 216
},
{
"epoch": 0.4776228906823184,
"grad_norm": 0.8588337898254395,
"learning_rate": 9.895136674161466e-06,
"loss": 0.822,
"step": 217
},
{
"epoch": 0.4798239178283199,
"grad_norm": 0.9150940775871277,
"learning_rate": 9.892508198507671e-06,
"loss": 0.8275,
"step": 218
},
{
"epoch": 0.48202494497432136,
"grad_norm": 0.8538154363632202,
"learning_rate": 9.889847544865383e-06,
"loss": 0.8084,
"step": 219
},
{
"epoch": 0.4842259721203228,
"grad_norm": 0.82140052318573,
"learning_rate": 9.887154730733699e-06,
"loss": 0.7956,
"step": 220
},
{
"epoch": 0.48642699926632427,
"grad_norm": 0.8522570729255676,
"learning_rate": 9.884429773823238e-06,
"loss": 0.8219,
"step": 221
},
{
"epoch": 0.4886280264123258,
"grad_norm": 0.7906549572944641,
"learning_rate": 9.881672692056022e-06,
"loss": 0.7978,
"step": 222
},
{
"epoch": 0.49082905355832723,
"grad_norm": 0.8216557502746582,
"learning_rate": 9.878883503565353e-06,
"loss": 0.8194,
"step": 223
},
{
"epoch": 0.4930300807043287,
"grad_norm": 0.8171641826629639,
"learning_rate": 9.876062226695703e-06,
"loss": 0.82,
"step": 224
},
{
"epoch": 0.49523110785033014,
"grad_norm": 0.9105944633483887,
"learning_rate": 9.87320888000259e-06,
"loss": 0.8335,
"step": 225
},
{
"epoch": 0.4974321349963316,
"grad_norm": 0.8593592643737793,
"learning_rate": 9.870323482252451e-06,
"loss": 0.8305,
"step": 226
},
{
"epoch": 0.4996331621423331,
"grad_norm": 0.794954776763916,
"learning_rate": 9.867406052422525e-06,
"loss": 0.7914,
"step": 227
},
{
"epoch": 0.5018341892883346,
"grad_norm": 1.0287636518478394,
"learning_rate": 9.864456609700726e-06,
"loss": 0.8232,
"step": 228
},
{
"epoch": 0.504035216434336,
"grad_norm": 0.8502154350280762,
"learning_rate": 9.861475173485516e-06,
"loss": 0.8205,
"step": 229
},
{
"epoch": 0.5062362435803375,
"grad_norm": 0.8397971987724304,
"learning_rate": 9.858461763385776e-06,
"loss": 0.7954,
"step": 230
},
{
"epoch": 0.5084372707263389,
"grad_norm": 0.9718735218048096,
"learning_rate": 9.855416399220683e-06,
"loss": 0.8123,
"step": 231
},
{
"epoch": 0.5106382978723404,
"grad_norm": 0.8418728113174438,
"learning_rate": 9.852339101019574e-06,
"loss": 0.803,
"step": 232
},
{
"epoch": 0.5128393250183418,
"grad_norm": 0.9093943238258362,
"learning_rate": 9.849229889021814e-06,
"loss": 0.8178,
"step": 233
},
{
"epoch": 0.5150403521643434,
"grad_norm": 0.9761664271354675,
"learning_rate": 9.846088783676666e-06,
"loss": 0.8159,
"step": 234
},
{
"epoch": 0.5172413793103449,
"grad_norm": 0.8195192217826843,
"learning_rate": 9.842915805643156e-06,
"loss": 0.7818,
"step": 235
},
{
"epoch": 0.5194424064563463,
"grad_norm": 0.8351858258247375,
"learning_rate": 9.839710975789937e-06,
"loss": 0.8099,
"step": 236
},
{
"epoch": 0.5216434336023478,
"grad_norm": 0.8101526498794556,
"learning_rate": 9.836474315195148e-06,
"loss": 0.81,
"step": 237
},
{
"epoch": 0.5238444607483492,
"grad_norm": 0.8259810209274292,
"learning_rate": 9.833205845146283e-06,
"loss": 0.8019,
"step": 238
},
{
"epoch": 0.5260454878943507,
"grad_norm": 0.8897265791893005,
"learning_rate": 9.829905587140041e-06,
"loss": 0.8043,
"step": 239
},
{
"epoch": 0.5282465150403521,
"grad_norm": 0.8082166314125061,
"learning_rate": 9.826573562882195e-06,
"loss": 0.7911,
"step": 240
},
{
"epoch": 0.5304475421863536,
"grad_norm": 0.840685248374939,
"learning_rate": 9.823209794287446e-06,
"loss": 0.8002,
"step": 241
},
{
"epoch": 0.532648569332355,
"grad_norm": 0.8207601308822632,
"learning_rate": 9.819814303479268e-06,
"loss": 0.7755,
"step": 242
},
{
"epoch": 0.5348495964783566,
"grad_norm": 0.8527492880821228,
"learning_rate": 9.81638711278978e-06,
"loss": 0.7676,
"step": 243
},
{
"epoch": 0.5370506236243581,
"grad_norm": 0.7807105779647827,
"learning_rate": 9.812928244759591e-06,
"loss": 0.8133,
"step": 244
},
{
"epoch": 0.5392516507703595,
"grad_norm": 0.7921954989433289,
"learning_rate": 9.809437722137647e-06,
"loss": 0.7878,
"step": 245
},
{
"epoch": 0.541452677916361,
"grad_norm": 0.8125286102294922,
"learning_rate": 9.805915567881088e-06,
"loss": 0.8129,
"step": 246
},
{
"epoch": 0.5436537050623624,
"grad_norm": 0.7859922647476196,
"learning_rate": 9.802361805155097e-06,
"loss": 0.8206,
"step": 247
},
{
"epoch": 0.5458547322083639,
"grad_norm": 0.8702252507209778,
"learning_rate": 9.798776457332742e-06,
"loss": 0.8589,
"step": 248
},
{
"epoch": 0.5480557593543653,
"grad_norm": 0.8135796785354614,
"learning_rate": 9.79515954799483e-06,
"loss": 0.8425,
"step": 249
},
{
"epoch": 0.5502567865003668,
"grad_norm": 0.9096975922584534,
"learning_rate": 9.791511100929743e-06,
"loss": 0.7746,
"step": 250
},
{
"epoch": 0.5524578136463683,
"grad_norm": 0.860896646976471,
"learning_rate": 9.78783114013329e-06,
"loss": 0.7908,
"step": 251
},
{
"epoch": 0.5546588407923698,
"grad_norm": 0.8375597596168518,
"learning_rate": 9.784119689808545e-06,
"loss": 0.7896,
"step": 252
},
{
"epoch": 0.5568598679383713,
"grad_norm": 0.9839475750923157,
"learning_rate": 9.780376774365687e-06,
"loss": 0.7684,
"step": 253
},
{
"epoch": 0.5590608950843727,
"grad_norm": 0.8096194267272949,
"learning_rate": 9.776602418421846e-06,
"loss": 0.8065,
"step": 254
},
{
"epoch": 0.5612619222303742,
"grad_norm": 0.8492815494537354,
"learning_rate": 9.772796646800926e-06,
"loss": 0.7867,
"step": 255
},
{
"epoch": 0.5634629493763756,
"grad_norm": 0.8766384720802307,
"learning_rate": 9.768959484533461e-06,
"loss": 0.7898,
"step": 256
},
{
"epoch": 0.5656639765223771,
"grad_norm": 0.8239680528640747,
"learning_rate": 9.765090956856437e-06,
"loss": 0.8074,
"step": 257
},
{
"epoch": 0.5678650036683786,
"grad_norm": 0.8484634757041931,
"learning_rate": 9.76119108921313e-06,
"loss": 0.7929,
"step": 258
},
{
"epoch": 0.57006603081438,
"grad_norm": 0.909271240234375,
"learning_rate": 9.757259907252938e-06,
"loss": 0.8038,
"step": 259
},
{
"epoch": 0.5722670579603815,
"grad_norm": 0.7906305193901062,
"learning_rate": 9.753297436831217e-06,
"loss": 0.7982,
"step": 260
},
{
"epoch": 0.574468085106383,
"grad_norm": 0.8911965489387512,
"learning_rate": 9.749303704009103e-06,
"loss": 0.7919,
"step": 261
},
{
"epoch": 0.5766691122523845,
"grad_norm": 0.8219661116600037,
"learning_rate": 9.745278735053345e-06,
"loss": 0.818,
"step": 262
},
{
"epoch": 0.5788701393983859,
"grad_norm": 0.8384687900543213,
"learning_rate": 9.741222556436132e-06,
"loss": 0.823,
"step": 263
},
{
"epoch": 0.5810711665443874,
"grad_norm": 0.7890099883079529,
"learning_rate": 9.737135194834923e-06,
"loss": 0.8175,
"step": 264
},
{
"epoch": 0.5832721936903889,
"grad_norm": 0.8594048023223877,
"learning_rate": 9.73301667713226e-06,
"loss": 0.8102,
"step": 265
},
{
"epoch": 0.5854732208363903,
"grad_norm": 0.8350965976715088,
"learning_rate": 9.728867030415604e-06,
"loss": 0.7648,
"step": 266
},
{
"epoch": 0.5876742479823918,
"grad_norm": 0.8902925252914429,
"learning_rate": 9.724686281977146e-06,
"loss": 0.8138,
"step": 267
},
{
"epoch": 0.5898752751283932,
"grad_norm": 0.9345558285713196,
"learning_rate": 9.720474459313641e-06,
"loss": 0.8063,
"step": 268
},
{
"epoch": 0.5920763022743947,
"grad_norm": 0.8297165632247925,
"learning_rate": 9.716231590126211e-06,
"loss": 0.7874,
"step": 269
},
{
"epoch": 0.5942773294203962,
"grad_norm": 0.9130961894989014,
"learning_rate": 9.711957702320176e-06,
"loss": 0.7885,
"step": 270
},
{
"epoch": 0.5964783565663977,
"grad_norm": 0.8230470418930054,
"learning_rate": 9.707652824004858e-06,
"loss": 0.8163,
"step": 271
},
{
"epoch": 0.5986793837123991,
"grad_norm": 0.8633292317390442,
"learning_rate": 9.703316983493414e-06,
"loss": 0.8086,
"step": 272
},
{
"epoch": 0.6008804108584006,
"grad_norm": 0.8341038823127747,
"learning_rate": 9.698950209302629e-06,
"loss": 0.7806,
"step": 273
},
{
"epoch": 0.6030814380044021,
"grad_norm": 0.8327906727790833,
"learning_rate": 9.694552530152747e-06,
"loss": 0.7922,
"step": 274
},
{
"epoch": 0.6052824651504035,
"grad_norm": 0.8399573564529419,
"learning_rate": 9.690123974967267e-06,
"loss": 0.7826,
"step": 275
},
{
"epoch": 0.607483492296405,
"grad_norm": 0.8266496062278748,
"learning_rate": 9.68566457287276e-06,
"loss": 0.8058,
"step": 276
},
{
"epoch": 0.6096845194424064,
"grad_norm": 0.9383936524391174,
"learning_rate": 9.681174353198687e-06,
"loss": 0.8074,
"step": 277
},
{
"epoch": 0.6118855465884079,
"grad_norm": 0.8384245038032532,
"learning_rate": 9.67665334547718e-06,
"loss": 0.7802,
"step": 278
},
{
"epoch": 0.6140865737344093,
"grad_norm": 0.898497462272644,
"learning_rate": 9.672101579442875e-06,
"loss": 0.8198,
"step": 279
},
{
"epoch": 0.6162876008804109,
"grad_norm": 0.8414992690086365,
"learning_rate": 9.667519085032701e-06,
"loss": 0.7795,
"step": 280
},
{
"epoch": 0.6184886280264124,
"grad_norm": 0.8452622890472412,
"learning_rate": 9.66290589238569e-06,
"loss": 0.7905,
"step": 281
},
{
"epoch": 0.6206896551724138,
"grad_norm": 0.8603907227516174,
"learning_rate": 9.658262031842772e-06,
"loss": 0.7864,
"step": 282
},
{
"epoch": 0.6228906823184153,
"grad_norm": 0.8132176995277405,
"learning_rate": 9.653587533946583e-06,
"loss": 0.7984,
"step": 283
},
{
"epoch": 0.6250917094644167,
"grad_norm": 0.9247682094573975,
"learning_rate": 9.648882429441258e-06,
"loss": 0.7962,
"step": 284
},
{
"epoch": 0.6272927366104182,
"grad_norm": 0.8044379949569702,
"learning_rate": 9.644146749272234e-06,
"loss": 0.7919,
"step": 285
},
{
"epoch": 0.6294937637564196,
"grad_norm": 0.8088259100914001,
"learning_rate": 9.639380524586044e-06,
"loss": 0.8082,
"step": 286
},
{
"epoch": 0.6316947909024211,
"grad_norm": 0.8488666415214539,
"learning_rate": 9.63458378673011e-06,
"loss": 0.8016,
"step": 287
},
{
"epoch": 0.6338958180484225,
"grad_norm": 0.760798454284668,
"learning_rate": 9.629756567252539e-06,
"loss": 0.7641,
"step": 288
},
{
"epoch": 0.6360968451944241,
"grad_norm": 0.8276031017303467,
"learning_rate": 9.624898897901915e-06,
"loss": 0.7894,
"step": 289
},
{
"epoch": 0.6382978723404256,
"grad_norm": 0.7995277047157288,
"learning_rate": 9.620010810627093e-06,
"loss": 0.789,
"step": 290
},
{
"epoch": 0.640498899486427,
"grad_norm": 0.8171667456626892,
"learning_rate": 9.615092337576987e-06,
"loss": 0.7983,
"step": 291
},
{
"epoch": 0.6426999266324285,
"grad_norm": 0.8365928530693054,
"learning_rate": 9.610143511100354e-06,
"loss": 0.8006,
"step": 292
},
{
"epoch": 0.6449009537784299,
"grad_norm": 0.9799849390983582,
"learning_rate": 9.605164363745588e-06,
"loss": 0.7855,
"step": 293
},
{
"epoch": 0.6471019809244314,
"grad_norm": 0.8243988752365112,
"learning_rate": 9.600154928260499e-06,
"loss": 0.7819,
"step": 294
},
{
"epoch": 0.6493030080704328,
"grad_norm": 0.8963895440101624,
"learning_rate": 9.595115237592112e-06,
"loss": 0.7798,
"step": 295
},
{
"epoch": 0.6515040352164343,
"grad_norm": 0.8542230725288391,
"learning_rate": 9.590045324886429e-06,
"loss": 0.7889,
"step": 296
},
{
"epoch": 0.6537050623624358,
"grad_norm": 0.8682403564453125,
"learning_rate": 9.584945223488227e-06,
"loss": 0.8056,
"step": 297
},
{
"epoch": 0.6559060895084373,
"grad_norm": 0.8441479206085205,
"learning_rate": 9.579814966940833e-06,
"loss": 0.8015,
"step": 298
},
{
"epoch": 0.6581071166544388,
"grad_norm": 0.7679582834243774,
"learning_rate": 9.574654588985907e-06,
"loss": 0.7717,
"step": 299
},
{
"epoch": 0.6603081438004402,
"grad_norm": 0.8768029808998108,
"learning_rate": 9.569464123563212e-06,
"loss": 0.7684,
"step": 300
},
{
"epoch": 0.6625091709464417,
"grad_norm": 0.9103320837020874,
"learning_rate": 9.564243604810401e-06,
"loss": 0.7979,
"step": 301
},
{
"epoch": 0.6647101980924431,
"grad_norm": 0.8766347765922546,
"learning_rate": 9.558993067062785e-06,
"loss": 0.8257,
"step": 302
},
{
"epoch": 0.6669112252384446,
"grad_norm": 0.8240722417831421,
"learning_rate": 9.553712544853109e-06,
"loss": 0.7781,
"step": 303
},
{
"epoch": 0.669112252384446,
"grad_norm": 0.8332318663597107,
"learning_rate": 9.548402072911328e-06,
"loss": 0.7796,
"step": 304
},
{
"epoch": 0.6713132795304475,
"grad_norm": 0.9323394298553467,
"learning_rate": 9.543061686164374e-06,
"loss": 0.7835,
"step": 305
},
{
"epoch": 0.673514306676449,
"grad_norm": 0.7580037117004395,
"learning_rate": 9.537691419735929e-06,
"loss": 0.7762,
"step": 306
},
{
"epoch": 0.6757153338224505,
"grad_norm": 0.799403190612793,
"learning_rate": 9.532291308946191e-06,
"loss": 0.7988,
"step": 307
},
{
"epoch": 0.677916360968452,
"grad_norm": 0.8588136434555054,
"learning_rate": 9.526861389311652e-06,
"loss": 0.7727,
"step": 308
},
{
"epoch": 0.6801173881144534,
"grad_norm": 0.8197365403175354,
"learning_rate": 9.52140169654485e-06,
"loss": 0.7914,
"step": 309
},
{
"epoch": 0.6823184152604549,
"grad_norm": 0.8177473545074463,
"learning_rate": 9.51591226655414e-06,
"loss": 0.7839,
"step": 310
},
{
"epoch": 0.6845194424064563,
"grad_norm": 0.8484516739845276,
"learning_rate": 9.51039313544346e-06,
"loss": 0.7833,
"step": 311
},
{
"epoch": 0.6867204695524578,
"grad_norm": 0.9226696491241455,
"learning_rate": 9.504844339512096e-06,
"loss": 0.8462,
"step": 312
},
{
"epoch": 0.6889214966984593,
"grad_norm": 0.9259462356567383,
"learning_rate": 9.499265915254434e-06,
"loss": 0.8244,
"step": 313
},
{
"epoch": 0.6911225238444607,
"grad_norm": 0.8454598784446716,
"learning_rate": 9.493657899359727e-06,
"loss": 0.8096,
"step": 314
},
{
"epoch": 0.6933235509904622,
"grad_norm": 0.8592511415481567,
"learning_rate": 9.488020328711851e-06,
"loss": 0.8089,
"step": 315
},
{
"epoch": 0.6955245781364637,
"grad_norm": 0.8652984499931335,
"learning_rate": 9.482353240389066e-06,
"loss": 0.7936,
"step": 316
},
{
"epoch": 0.6977256052824652,
"grad_norm": 0.7984046339988708,
"learning_rate": 9.476656671663766e-06,
"loss": 0.7941,
"step": 317
},
{
"epoch": 0.6999266324284666,
"grad_norm": 0.7974331378936768,
"learning_rate": 9.470930660002241e-06,
"loss": 0.8217,
"step": 318
},
{
"epoch": 0.7021276595744681,
"grad_norm": 0.883917510509491,
"learning_rate": 9.465175243064428e-06,
"loss": 0.8123,
"step": 319
},
{
"epoch": 0.7043286867204696,
"grad_norm": 0.8068121075630188,
"learning_rate": 9.459390458703654e-06,
"loss": 0.7959,
"step": 320
},
{
"epoch": 0.706529713866471,
"grad_norm": 0.7877749800682068,
"learning_rate": 9.453576344966404e-06,
"loss": 0.7854,
"step": 321
},
{
"epoch": 0.7087307410124725,
"grad_norm": 0.8329615592956543,
"learning_rate": 9.44773294009206e-06,
"loss": 0.8343,
"step": 322
},
{
"epoch": 0.7109317681584739,
"grad_norm": 0.8515647649765015,
"learning_rate": 9.441860282512648e-06,
"loss": 0.7964,
"step": 323
},
{
"epoch": 0.7131327953044754,
"grad_norm": 0.8183252811431885,
"learning_rate": 9.435958410852593e-06,
"loss": 0.8008,
"step": 324
},
{
"epoch": 0.7153338224504769,
"grad_norm": 0.8000170588493347,
"learning_rate": 9.430027363928458e-06,
"loss": 0.791,
"step": 325
},
{
"epoch": 0.7175348495964784,
"grad_norm": 0.8109614849090576,
"learning_rate": 9.424067180748692e-06,
"loss": 0.7945,
"step": 326
},
{
"epoch": 0.7197358767424799,
"grad_norm": 0.8509262204170227,
"learning_rate": 9.418077900513377e-06,
"loss": 0.7978,
"step": 327
},
{
"epoch": 0.7219369038884813,
"grad_norm": 0.8893531560897827,
"learning_rate": 9.41205956261396e-06,
"loss": 0.7739,
"step": 328
},
{
"epoch": 0.7241379310344828,
"grad_norm": 0.7971740365028381,
"learning_rate": 9.406012206633004e-06,
"loss": 0.7816,
"step": 329
},
{
"epoch": 0.7263389581804842,
"grad_norm": 0.8178066611289978,
"learning_rate": 9.39993587234392e-06,
"loss": 0.7841,
"step": 330
},
{
"epoch": 0.7285399853264857,
"grad_norm": 0.9168744683265686,
"learning_rate": 9.393830599710714e-06,
"loss": 0.7721,
"step": 331
},
{
"epoch": 0.7307410124724871,
"grad_norm": 0.7745696902275085,
"learning_rate": 9.387696428887715e-06,
"loss": 0.7675,
"step": 332
},
{
"epoch": 0.7329420396184886,
"grad_norm": 0.8073949813842773,
"learning_rate": 9.381533400219319e-06,
"loss": 0.7837,
"step": 333
},
{
"epoch": 0.7351430667644901,
"grad_norm": 0.7783129811286926,
"learning_rate": 9.375341554239716e-06,
"loss": 0.8124,
"step": 334
},
{
"epoch": 0.7373440939104916,
"grad_norm": 0.9051381349563599,
"learning_rate": 9.369120931672631e-06,
"loss": 0.8004,
"step": 335
},
{
"epoch": 0.7395451210564931,
"grad_norm": 0.8168099522590637,
"learning_rate": 9.362871573431046e-06,
"loss": 0.7809,
"step": 336
},
{
"epoch": 0.7417461482024945,
"grad_norm": 0.8041391968727112,
"learning_rate": 9.356593520616948e-06,
"loss": 0.7765,
"step": 337
},
{
"epoch": 0.743947175348496,
"grad_norm": 0.7938492298126221,
"learning_rate": 9.350286814521037e-06,
"loss": 0.7835,
"step": 338
},
{
"epoch": 0.7461482024944974,
"grad_norm": 0.9754701852798462,
"learning_rate": 9.343951496622473e-06,
"loss": 0.7896,
"step": 339
},
{
"epoch": 0.7483492296404989,
"grad_norm": 0.8210593461990356,
"learning_rate": 9.337587608588588e-06,
"loss": 0.7984,
"step": 340
},
{
"epoch": 0.7505502567865003,
"grad_norm": 0.8359233736991882,
"learning_rate": 9.33119519227463e-06,
"loss": 0.8032,
"step": 341
},
{
"epoch": 0.7527512839325018,
"grad_norm": 0.9186483025550842,
"learning_rate": 9.324774289723469e-06,
"loss": 0.7693,
"step": 342
},
{
"epoch": 0.7549523110785032,
"grad_norm": 0.8890076279640198,
"learning_rate": 9.318324943165331e-06,
"loss": 0.7928,
"step": 343
},
{
"epoch": 0.7571533382245048,
"grad_norm": 0.7948359847068787,
"learning_rate": 9.311847195017518e-06,
"loss": 0.7615,
"step": 344
},
{
"epoch": 0.7593543653705063,
"grad_norm": 0.8466710448265076,
"learning_rate": 9.30534108788413e-06,
"loss": 0.7631,
"step": 345
},
{
"epoch": 0.7615553925165077,
"grad_norm": 0.9230742454528809,
"learning_rate": 9.298806664555783e-06,
"loss": 0.7544,
"step": 346
},
{
"epoch": 0.7637564196625092,
"grad_norm": 0.8038057684898376,
"learning_rate": 9.292243968009332e-06,
"loss": 0.7844,
"step": 347
},
{
"epoch": 0.7659574468085106,
"grad_norm": 0.8111063838005066,
"learning_rate": 9.285653041407575e-06,
"loss": 0.7721,
"step": 348
},
{
"epoch": 0.7681584739545121,
"grad_norm": 1.0078511238098145,
"learning_rate": 9.27903392809899e-06,
"loss": 0.7885,
"step": 349
},
{
"epoch": 0.7703595011005135,
"grad_norm": 0.9313395619392395,
"learning_rate": 9.272386671617431e-06,
"loss": 0.7503,
"step": 350
},
{
"epoch": 0.772560528246515,
"grad_norm": 0.8243265151977539,
"learning_rate": 9.265711315681853e-06,
"loss": 0.7806,
"step": 351
},
{
"epoch": 0.7747615553925165,
"grad_norm": 0.8514069318771362,
"learning_rate": 9.259007904196023e-06,
"loss": 0.7561,
"step": 352
},
{
"epoch": 0.776962582538518,
"grad_norm": 0.9069885015487671,
"learning_rate": 9.25227648124822e-06,
"loss": 0.7717,
"step": 353
},
{
"epoch": 0.7791636096845195,
"grad_norm": 0.8492381572723389,
"learning_rate": 9.24551709111097e-06,
"loss": 0.7894,
"step": 354
},
{
"epoch": 0.7813646368305209,
"grad_norm": 0.8018149137496948,
"learning_rate": 9.23872977824072e-06,
"loss": 0.7933,
"step": 355
},
{
"epoch": 0.7835656639765224,
"grad_norm": 0.8102025985717773,
"learning_rate": 9.231914587277579e-06,
"loss": 0.8026,
"step": 356
},
{
"epoch": 0.7857666911225238,
"grad_norm": 0.8567003607749939,
"learning_rate": 9.225071563045007e-06,
"loss": 0.7686,
"step": 357
},
{
"epoch": 0.7879677182685253,
"grad_norm": 0.7920061349868774,
"learning_rate": 9.218200750549517e-06,
"loss": 0.7436,
"step": 358
},
{
"epoch": 0.7901687454145268,
"grad_norm": 0.7901113629341125,
"learning_rate": 9.211302194980391e-06,
"loss": 0.8047,
"step": 359
},
{
"epoch": 0.7923697725605282,
"grad_norm": 0.8072230815887451,
"learning_rate": 9.204375941709377e-06,
"loss": 0.7661,
"step": 360
},
{
"epoch": 0.7945707997065297,
"grad_norm": 0.8206577301025391,
"learning_rate": 9.197422036290386e-06,
"loss": 0.764,
"step": 361
},
{
"epoch": 0.7967718268525312,
"grad_norm": 0.808757483959198,
"learning_rate": 9.190440524459203e-06,
"loss": 0.7633,
"step": 362
},
{
"epoch": 0.7989728539985327,
"grad_norm": 0.8489531874656677,
"learning_rate": 9.183431452133177e-06,
"loss": 0.8141,
"step": 363
},
{
"epoch": 0.8011738811445341,
"grad_norm": 0.8459470868110657,
"learning_rate": 9.176394865410922e-06,
"loss": 0.8165,
"step": 364
},
{
"epoch": 0.8033749082905356,
"grad_norm": 0.8386378884315491,
"learning_rate": 9.169330810572012e-06,
"loss": 0.8189,
"step": 365
},
{
"epoch": 0.805575935436537,
"grad_norm": 0.8197809457778931,
"learning_rate": 9.162239334076684e-06,
"loss": 0.7752,
"step": 366
},
{
"epoch": 0.8077769625825385,
"grad_norm": 0.8089394569396973,
"learning_rate": 9.15512048256552e-06,
"loss": 0.7892,
"step": 367
},
{
"epoch": 0.80997798972854,
"grad_norm": 0.7954054474830627,
"learning_rate": 9.147974302859158e-06,
"loss": 0.7939,
"step": 368
},
{
"epoch": 0.8121790168745414,
"grad_norm": 0.9242069125175476,
"learning_rate": 9.140800841957958e-06,
"loss": 0.7752,
"step": 369
},
{
"epoch": 0.8143800440205429,
"grad_norm": 0.8731237649917603,
"learning_rate": 9.133600147041723e-06,
"loss": 0.8005,
"step": 370
},
{
"epoch": 0.8165810711665444,
"grad_norm": 0.8165715336799622,
"learning_rate": 9.126372265469368e-06,
"loss": 0.7681,
"step": 371
},
{
"epoch": 0.8187820983125459,
"grad_norm": 0.9160674810409546,
"learning_rate": 9.119117244778609e-06,
"loss": 0.7908,
"step": 372
},
{
"epoch": 0.8209831254585473,
"grad_norm": 0.7941291928291321,
"learning_rate": 9.111835132685665e-06,
"loss": 0.7682,
"step": 373
},
{
"epoch": 0.8231841526045488,
"grad_norm": 0.9019894599914551,
"learning_rate": 9.104525977084928e-06,
"loss": 0.8021,
"step": 374
},
{
"epoch": 0.8253851797505503,
"grad_norm": 0.8680083751678467,
"learning_rate": 9.09718982604866e-06,
"loss": 0.7967,
"step": 375
},
{
"epoch": 0.8275862068965517,
"grad_norm": 0.8080365657806396,
"learning_rate": 9.08982672782667e-06,
"loss": 0.7987,
"step": 376
},
{
"epoch": 0.8297872340425532,
"grad_norm": 0.8596054315567017,
"learning_rate": 9.082436730845993e-06,
"loss": 0.8007,
"step": 377
},
{
"epoch": 0.8319882611885546,
"grad_norm": 0.7935712933540344,
"learning_rate": 9.07501988371059e-06,
"loss": 0.7812,
"step": 378
},
{
"epoch": 0.8341892883345561,
"grad_norm": 0.8184458017349243,
"learning_rate": 9.067576235200999e-06,
"loss": 0.767,
"step": 379
},
{
"epoch": 0.8363903154805576,
"grad_norm": 0.8405432105064392,
"learning_rate": 9.060105834274044e-06,
"loss": 0.7894,
"step": 380
},
{
"epoch": 0.8385913426265591,
"grad_norm": 0.9065471291542053,
"learning_rate": 9.05260873006249e-06,
"loss": 0.7936,
"step": 381
},
{
"epoch": 0.8407923697725606,
"grad_norm": 0.8618866205215454,
"learning_rate": 9.045084971874738e-06,
"loss": 0.8123,
"step": 382
},
{
"epoch": 0.842993396918562,
"grad_norm": 0.8184080719947815,
"learning_rate": 9.037534609194482e-06,
"loss": 0.7802,
"step": 383
},
{
"epoch": 0.8451944240645635,
"grad_norm": 0.8824077844619751,
"learning_rate": 9.029957691680404e-06,
"loss": 0.7908,
"step": 384
},
{
"epoch": 0.8473954512105649,
"grad_norm": 0.8772209286689758,
"learning_rate": 9.022354269165828e-06,
"loss": 0.7616,
"step": 385
},
{
"epoch": 0.8495964783565664,
"grad_norm": 0.916607677936554,
"learning_rate": 9.014724391658407e-06,
"loss": 0.8007,
"step": 386
},
{
"epoch": 0.8517975055025678,
"grad_norm": 0.8488522171974182,
"learning_rate": 9.007068109339783e-06,
"loss": 0.7664,
"step": 387
},
{
"epoch": 0.8539985326485693,
"grad_norm": 0.9058408737182617,
"learning_rate": 8.999385472565271e-06,
"loss": 0.7904,
"step": 388
},
{
"epoch": 0.8561995597945709,
"grad_norm": 0.8716396689414978,
"learning_rate": 8.991676531863507e-06,
"loss": 0.7726,
"step": 389
},
{
"epoch": 0.8584005869405723,
"grad_norm": 0.7919327616691589,
"learning_rate": 8.98394133793614e-06,
"loss": 0.7783,
"step": 390
},
{
"epoch": 0.8606016140865738,
"grad_norm": 0.8153389096260071,
"learning_rate": 8.976179941657478e-06,
"loss": 0.7849,
"step": 391
},
{
"epoch": 0.8628026412325752,
"grad_norm": 0.9036357402801514,
"learning_rate": 8.968392394074164e-06,
"loss": 0.7575,
"step": 392
},
{
"epoch": 0.8650036683785767,
"grad_norm": 0.8810394406318665,
"learning_rate": 8.960578746404837e-06,
"loss": 0.7861,
"step": 393
},
{
"epoch": 0.8672046955245781,
"grad_norm": 0.8427317142486572,
"learning_rate": 8.9527390500398e-06,
"loss": 0.8008,
"step": 394
},
{
"epoch": 0.8694057226705796,
"grad_norm": 0.87883460521698,
"learning_rate": 8.944873356540671e-06,
"loss": 0.8146,
"step": 395
},
{
"epoch": 0.871606749816581,
"grad_norm": 0.7933640480041504,
"learning_rate": 8.936981717640061e-06,
"loss": 0.7715,
"step": 396
},
{
"epoch": 0.8738077769625825,
"grad_norm": 0.8374460339546204,
"learning_rate": 8.929064185241214e-06,
"loss": 0.8053,
"step": 397
},
{
"epoch": 0.8760088041085841,
"grad_norm": 0.7720319032669067,
"learning_rate": 8.921120811417678e-06,
"loss": 0.7851,
"step": 398
},
{
"epoch": 0.8782098312545855,
"grad_norm": 0.8741126656532288,
"learning_rate": 8.913151648412963e-06,
"loss": 0.7947,
"step": 399
},
{
"epoch": 0.880410858400587,
"grad_norm": 0.868709146976471,
"learning_rate": 8.905156748640194e-06,
"loss": 0.7741,
"step": 400
},
{
"epoch": 0.8826118855465884,
"grad_norm": 0.8275803923606873,
"learning_rate": 8.897136164681763e-06,
"loss": 0.7853,
"step": 401
},
{
"epoch": 0.8848129126925899,
"grad_norm": 0.8300104737281799,
"learning_rate": 8.889089949288986e-06,
"loss": 0.7678,
"step": 402
},
{
"epoch": 0.8870139398385913,
"grad_norm": 0.9102062582969666,
"learning_rate": 8.881018155381766e-06,
"loss": 0.7869,
"step": 403
},
{
"epoch": 0.8892149669845928,
"grad_norm": 0.8648975491523743,
"learning_rate": 8.872920836048222e-06,
"loss": 0.781,
"step": 404
},
{
"epoch": 0.8914159941305942,
"grad_norm": 0.8916117548942566,
"learning_rate": 8.864798044544365e-06,
"loss": 0.7694,
"step": 405
},
{
"epoch": 0.8936170212765957,
"grad_norm": 0.9618800282478333,
"learning_rate": 8.85664983429373e-06,
"loss": 0.7617,
"step": 406
},
{
"epoch": 0.8958180484225972,
"grad_norm": 0.8170496225357056,
"learning_rate": 8.84847625888703e-06,
"loss": 0.7744,
"step": 407
},
{
"epoch": 0.8980190755685987,
"grad_norm": 0.8599210977554321,
"learning_rate": 8.840277372081812e-06,
"loss": 0.7982,
"step": 408
},
{
"epoch": 0.9002201027146002,
"grad_norm": 0.8268464207649231,
"learning_rate": 8.832053227802089e-06,
"loss": 0.7797,
"step": 409
},
{
"epoch": 0.9024211298606016,
"grad_norm": 0.8508797287940979,
"learning_rate": 8.823803880137993e-06,
"loss": 0.8013,
"step": 410
},
{
"epoch": 0.9046221570066031,
"grad_norm": 0.7934787273406982,
"learning_rate": 8.815529383345421e-06,
"loss": 0.7973,
"step": 411
},
{
"epoch": 0.9068231841526045,
"grad_norm": 0.8985304236412048,
"learning_rate": 8.807229791845673e-06,
"loss": 0.7894,
"step": 412
},
{
"epoch": 0.909024211298606,
"grad_norm": 0.8184648156166077,
"learning_rate": 8.7989051602251e-06,
"loss": 0.7735,
"step": 413
},
{
"epoch": 0.9112252384446075,
"grad_norm": 0.8377068042755127,
"learning_rate": 8.790555543234739e-06,
"loss": 0.8012,
"step": 414
},
{
"epoch": 0.9134262655906089,
"grad_norm": 0.8117819428443909,
"learning_rate": 8.782180995789953e-06,
"loss": 0.7787,
"step": 415
},
{
"epoch": 0.9156272927366104,
"grad_norm": 0.8211496472358704,
"learning_rate": 8.773781572970079e-06,
"loss": 0.7386,
"step": 416
},
{
"epoch": 0.9178283198826119,
"grad_norm": 0.8821373581886292,
"learning_rate": 8.765357330018056e-06,
"loss": 0.789,
"step": 417
},
{
"epoch": 0.9200293470286134,
"grad_norm": 0.7815222144126892,
"learning_rate": 8.756908322340063e-06,
"loss": 0.7937,
"step": 418
},
{
"epoch": 0.9222303741746148,
"grad_norm": 0.8329475522041321,
"learning_rate": 8.748434605505159e-06,
"loss": 0.7717,
"step": 419
},
{
"epoch": 0.9244314013206163,
"grad_norm": 0.9906356334686279,
"learning_rate": 8.739936235244913e-06,
"loss": 0.7812,
"step": 420
},
{
"epoch": 0.9266324284666178,
"grad_norm": 0.8735545873641968,
"learning_rate": 8.73141326745304e-06,
"loss": 0.7917,
"step": 421
},
{
"epoch": 0.9288334556126192,
"grad_norm": 0.825126051902771,
"learning_rate": 8.722865758185036e-06,
"loss": 0.7711,
"step": 422
},
{
"epoch": 0.9310344827586207,
"grad_norm": 0.84153151512146,
"learning_rate": 8.7142937636578e-06,
"loss": 0.8098,
"step": 423
},
{
"epoch": 0.9332355099046221,
"grad_norm": 0.8053434491157532,
"learning_rate": 8.705697340249275e-06,
"loss": 0.766,
"step": 424
},
{
"epoch": 0.9354365370506236,
"grad_norm": 0.8101866841316223,
"learning_rate": 8.69707654449807e-06,
"loss": 0.7995,
"step": 425
},
{
"epoch": 0.9376375641966251,
"grad_norm": 0.7579418420791626,
"learning_rate": 8.688431433103094e-06,
"loss": 0.7469,
"step": 426
},
{
"epoch": 0.9398385913426266,
"grad_norm": 0.8217850923538208,
"learning_rate": 8.679762062923176e-06,
"loss": 0.7795,
"step": 427
},
{
"epoch": 0.942039618488628,
"grad_norm": 0.7982969284057617,
"learning_rate": 8.671068490976695e-06,
"loss": 0.8058,
"step": 428
},
{
"epoch": 0.9442406456346295,
"grad_norm": 0.8068963885307312,
"learning_rate": 8.66235077444121e-06,
"loss": 0.7535,
"step": 429
},
{
"epoch": 0.946441672780631,
"grad_norm": 0.8331409096717834,
"learning_rate": 8.653608970653072e-06,
"loss": 0.7853,
"step": 430
},
{
"epoch": 0.9486426999266324,
"grad_norm": 0.8579436540603638,
"learning_rate": 8.644843137107058e-06,
"loss": 0.7668,
"step": 431
},
{
"epoch": 0.9508437270726339,
"grad_norm": 0.8737447261810303,
"learning_rate": 8.636053331455986e-06,
"loss": 0.7729,
"step": 432
},
{
"epoch": 0.9530447542186353,
"grad_norm": 0.8855342268943787,
"learning_rate": 8.627239611510343e-06,
"loss": 0.7919,
"step": 433
},
{
"epoch": 0.9552457813646368,
"grad_norm": 0.8600148558616638,
"learning_rate": 8.618402035237895e-06,
"loss": 0.7455,
"step": 434
},
{
"epoch": 0.9574468085106383,
"grad_norm": 0.8547468781471252,
"learning_rate": 8.60954066076331e-06,
"loss": 0.7614,
"step": 435
},
{
"epoch": 0.9596478356566398,
"grad_norm": 0.9474692940711975,
"learning_rate": 8.600655546367782e-06,
"loss": 0.7542,
"step": 436
},
{
"epoch": 0.9618488628026413,
"grad_norm": 0.8989570736885071,
"learning_rate": 8.591746750488639e-06,
"loss": 0.7542,
"step": 437
},
{
"epoch": 0.9640498899486427,
"grad_norm": 0.8280332684516907,
"learning_rate": 8.582814331718961e-06,
"loss": 0.7908,
"step": 438
},
{
"epoch": 0.9662509170946442,
"grad_norm": 0.9018235206604004,
"learning_rate": 8.5738583488072e-06,
"loss": 0.7811,
"step": 439
},
{
"epoch": 0.9684519442406456,
"grad_norm": 0.901766300201416,
"learning_rate": 8.564878860656784e-06,
"loss": 0.7359,
"step": 440
},
{
"epoch": 0.9706529713866471,
"grad_norm": 0.8833390474319458,
"learning_rate": 8.555875926325738e-06,
"loss": 0.7684,
"step": 441
},
{
"epoch": 0.9728539985326485,
"grad_norm": 0.8447439670562744,
"learning_rate": 8.54684960502629e-06,
"loss": 0.7431,
"step": 442
},
{
"epoch": 0.97505502567865,
"grad_norm": 0.9023051261901855,
"learning_rate": 8.537799956124486e-06,
"loss": 0.7531,
"step": 443
},
{
"epoch": 0.9772560528246516,
"grad_norm": 0.847751796245575,
"learning_rate": 8.528727039139796e-06,
"loss": 0.7602,
"step": 444
},
{
"epoch": 0.979457079970653,
"grad_norm": 0.8495941758155823,
"learning_rate": 8.519630913744726e-06,
"loss": 0.7629,
"step": 445
},
{
"epoch": 0.9816581071166545,
"grad_norm": 0.8522076606750488,
"learning_rate": 8.51051163976442e-06,
"loss": 0.7715,
"step": 446
},
{
"epoch": 0.9838591342626559,
"grad_norm": 0.9282381534576416,
"learning_rate": 8.501369277176275e-06,
"loss": 0.7799,
"step": 447
},
{
"epoch": 0.9860601614086574,
"grad_norm": 0.889005720615387,
"learning_rate": 8.492203886109538e-06,
"loss": 0.7855,
"step": 448
},
{
"epoch": 0.9882611885546588,
"grad_norm": 0.8765848875045776,
"learning_rate": 8.483015526844914e-06,
"loss": 0.7808,
"step": 449
},
{
"epoch": 0.9904622157006603,
"grad_norm": 0.8614368438720703,
"learning_rate": 8.473804259814173e-06,
"loss": 0.7912,
"step": 450
},
{
"epoch": 0.9926632428466617,
"grad_norm": 0.8306472897529602,
"learning_rate": 8.464570145599742e-06,
"loss": 0.7548,
"step": 451
},
{
"epoch": 0.9948642699926632,
"grad_norm": 0.8920493721961975,
"learning_rate": 8.455313244934324e-06,
"loss": 0.7607,
"step": 452
},
{
"epoch": 0.9970652971386648,
"grad_norm": 0.8534906506538391,
"learning_rate": 8.44603361870048e-06,
"loss": 0.7614,
"step": 453
},
{
"epoch": 0.9992663242846662,
"grad_norm": 0.7993549704551697,
"learning_rate": 8.43673132793024e-06,
"loss": 0.775,
"step": 454
},
{
"epoch": 1.0014673514306676,
"grad_norm": 0.8929709196090698,
"learning_rate": 8.4274064338047e-06,
"loss": 0.7155,
"step": 455
},
{
"epoch": 1.0036683785766691,
"grad_norm": 0.9513823986053467,
"learning_rate": 8.418058997653613e-06,
"loss": 0.6658,
"step": 456
},
{
"epoch": 1.0058694057226705,
"grad_norm": 0.7889032959938049,
"learning_rate": 8.408689080954997e-06,
"loss": 0.6645,
"step": 457
},
{
"epoch": 1.008070432868672,
"grad_norm": 0.8054494261741638,
"learning_rate": 8.399296745334723e-06,
"loss": 0.6809,
"step": 458
},
{
"epoch": 1.0102714600146736,
"grad_norm": 0.856062114238739,
"learning_rate": 8.389882052566106e-06,
"loss": 0.6527,
"step": 459
},
{
"epoch": 1.012472487160675,
"grad_norm": 0.9217520952224731,
"learning_rate": 8.380445064569506e-06,
"loss": 0.6563,
"step": 460
},
{
"epoch": 1.0146735143066765,
"grad_norm": 0.8915321230888367,
"learning_rate": 8.370985843411924e-06,
"loss": 0.6422,
"step": 461
},
{
"epoch": 1.0168745414526779,
"grad_norm": 0.8341991901397705,
"learning_rate": 8.361504451306585e-06,
"loss": 0.6385,
"step": 462
},
{
"epoch": 1.0190755685986794,
"grad_norm": 0.8089377284049988,
"learning_rate": 8.352000950612526e-06,
"loss": 0.6738,
"step": 463
},
{
"epoch": 1.0212765957446808,
"grad_norm": 0.8495323657989502,
"learning_rate": 8.342475403834203e-06,
"loss": 0.6799,
"step": 464
},
{
"epoch": 1.0234776228906823,
"grad_norm": 0.8130814433097839,
"learning_rate": 8.332927873621059e-06,
"loss": 0.6588,
"step": 465
},
{
"epoch": 1.0256786500366837,
"grad_norm": 0.7826944589614868,
"learning_rate": 8.32335842276713e-06,
"loss": 0.6607,
"step": 466
},
{
"epoch": 1.0278796771826852,
"grad_norm": 0.776006817817688,
"learning_rate": 8.313767114210615e-06,
"loss": 0.645,
"step": 467
},
{
"epoch": 1.0300807043286868,
"grad_norm": 0.7997714877128601,
"learning_rate": 8.30415401103348e-06,
"loss": 0.6484,
"step": 468
},
{
"epoch": 1.0322817314746882,
"grad_norm": 0.798355758190155,
"learning_rate": 8.29451917646103e-06,
"loss": 0.6547,
"step": 469
},
{
"epoch": 1.0344827586206897,
"grad_norm": 0.813959002494812,
"learning_rate": 8.284862673861498e-06,
"loss": 0.6503,
"step": 470
},
{
"epoch": 1.036683785766691,
"grad_norm": 0.7728633284568787,
"learning_rate": 8.275184566745625e-06,
"loss": 0.632,
"step": 471
},
{
"epoch": 1.0388848129126926,
"grad_norm": 0.7462618947029114,
"learning_rate": 8.265484918766243e-06,
"loss": 0.6417,
"step": 472
},
{
"epoch": 1.041085840058694,
"grad_norm": 0.762089192867279,
"learning_rate": 8.255763793717868e-06,
"loss": 0.6204,
"step": 473
},
{
"epoch": 1.0432868672046955,
"grad_norm": 0.846603274345398,
"learning_rate": 8.24602125553626e-06,
"loss": 0.6581,
"step": 474
},
{
"epoch": 1.045487894350697,
"grad_norm": 0.8219186067581177,
"learning_rate": 8.236257368298022e-06,
"loss": 0.6003,
"step": 475
},
{
"epoch": 1.0476889214966985,
"grad_norm": 0.9549553394317627,
"learning_rate": 8.226472196220156e-06,
"loss": 0.673,
"step": 476
},
{
"epoch": 1.0498899486427,
"grad_norm": 0.8765217661857605,
"learning_rate": 8.216665803659671e-06,
"loss": 0.6582,
"step": 477
},
{
"epoch": 1.0520909757887014,
"grad_norm": 0.7813266515731812,
"learning_rate": 8.206838255113132e-06,
"loss": 0.6536,
"step": 478
},
{
"epoch": 1.054292002934703,
"grad_norm": 0.8416471481323242,
"learning_rate": 8.196989615216248e-06,
"loss": 0.6641,
"step": 479
},
{
"epoch": 1.0564930300807043,
"grad_norm": 0.7775030136108398,
"learning_rate": 8.18711994874345e-06,
"loss": 0.6268,
"step": 480
},
{
"epoch": 1.0586940572267058,
"grad_norm": 0.7364187240600586,
"learning_rate": 8.177229320607455e-06,
"loss": 0.6641,
"step": 481
},
{
"epoch": 1.0608950843727072,
"grad_norm": 0.8157299757003784,
"learning_rate": 8.16731779585885e-06,
"loss": 0.638,
"step": 482
},
{
"epoch": 1.0630961115187088,
"grad_norm": 0.7496952414512634,
"learning_rate": 8.157385439685656e-06,
"loss": 0.6214,
"step": 483
},
{
"epoch": 1.06529713866471,
"grad_norm": 0.7788705229759216,
"learning_rate": 8.147432317412902e-06,
"loss": 0.6658,
"step": 484
},
{
"epoch": 1.0674981658107117,
"grad_norm": 0.7792511582374573,
"learning_rate": 8.137458494502195e-06,
"loss": 0.6765,
"step": 485
},
{
"epoch": 1.0696991929567132,
"grad_norm": 0.8139557242393494,
"learning_rate": 8.127464036551294e-06,
"loss": 0.6544,
"step": 486
},
{
"epoch": 1.0719002201027146,
"grad_norm": 0.8252546191215515,
"learning_rate": 8.117449009293668e-06,
"loss": 0.6673,
"step": 487
},
{
"epoch": 1.0741012472487161,
"grad_norm": 0.7704468965530396,
"learning_rate": 8.107413478598076e-06,
"loss": 0.6372,
"step": 488
},
{
"epoch": 1.0763022743947175,
"grad_norm": 0.8226885199546814,
"learning_rate": 8.097357510468125e-06,
"loss": 0.6497,
"step": 489
},
{
"epoch": 1.078503301540719,
"grad_norm": 0.7852274775505066,
"learning_rate": 8.087281171041838e-06,
"loss": 0.632,
"step": 490
},
{
"epoch": 1.0807043286867204,
"grad_norm": 0.8781352639198303,
"learning_rate": 8.077184526591224e-06,
"loss": 0.6347,
"step": 491
},
{
"epoch": 1.082905355832722,
"grad_norm": 0.8479652404785156,
"learning_rate": 8.067067643521834e-06,
"loss": 0.6664,
"step": 492
},
{
"epoch": 1.0851063829787233,
"grad_norm": 0.7649582028388977,
"learning_rate": 8.056930588372329e-06,
"loss": 0.6456,
"step": 493
},
{
"epoch": 1.0873074101247249,
"grad_norm": 0.7952910661697388,
"learning_rate": 8.046773427814043e-06,
"loss": 0.6445,
"step": 494
},
{
"epoch": 1.0895084372707264,
"grad_norm": 0.8468136787414551,
"learning_rate": 8.03659622865054e-06,
"loss": 0.6607,
"step": 495
},
{
"epoch": 1.0917094644167278,
"grad_norm": 0.8044291138648987,
"learning_rate": 8.026399057817182e-06,
"loss": 0.6607,
"step": 496
},
{
"epoch": 1.0939104915627293,
"grad_norm": 0.7467012405395508,
"learning_rate": 8.016181982380682e-06,
"loss": 0.6677,
"step": 497
},
{
"epoch": 1.0961115187087307,
"grad_norm": 0.8444515466690063,
"learning_rate": 8.005945069538668e-06,
"loss": 0.6454,
"step": 498
},
{
"epoch": 1.0983125458547323,
"grad_norm": 0.7249956727027893,
"learning_rate": 7.995688386619233e-06,
"loss": 0.624,
"step": 499
},
{
"epoch": 1.1005135730007336,
"grad_norm": 0.7788255214691162,
"learning_rate": 7.985412001080503e-06,
"loss": 0.6619,
"step": 500
},
{
"epoch": 1.1027146001467352,
"grad_norm": 0.7383993268013,
"learning_rate": 7.975115980510187e-06,
"loss": 0.6356,
"step": 501
},
{
"epoch": 1.1049156272927365,
"grad_norm": 0.7658706307411194,
"learning_rate": 7.96480039262513e-06,
"loss": 0.6395,
"step": 502
},
{
"epoch": 1.107116654438738,
"grad_norm": 0.7822476029396057,
"learning_rate": 7.954465305270875e-06,
"loss": 0.6793,
"step": 503
},
{
"epoch": 1.1093176815847396,
"grad_norm": 0.7769404053688049,
"learning_rate": 7.94411078642121e-06,
"loss": 0.6616,
"step": 504
},
{
"epoch": 1.111518708730741,
"grad_norm": 0.7475581765174866,
"learning_rate": 7.933736904177727e-06,
"loss": 0.6485,
"step": 505
},
{
"epoch": 1.1137197358767426,
"grad_norm": 0.7527409195899963,
"learning_rate": 7.923343726769368e-06,
"loss": 0.6489,
"step": 506
},
{
"epoch": 1.115920763022744,
"grad_norm": 0.820989727973938,
"learning_rate": 7.912931322551981e-06,
"loss": 0.6551,
"step": 507
},
{
"epoch": 1.1181217901687455,
"grad_norm": 0.7625864744186401,
"learning_rate": 7.902499760007867e-06,
"loss": 0.6773,
"step": 508
},
{
"epoch": 1.1203228173147468,
"grad_norm": 0.7642733454704285,
"learning_rate": 7.892049107745334e-06,
"loss": 0.6068,
"step": 509
},
{
"epoch": 1.1225238444607484,
"grad_norm": 0.7689752578735352,
"learning_rate": 7.881579434498239e-06,
"loss": 0.6546,
"step": 510
},
{
"epoch": 1.1247248716067497,
"grad_norm": 0.734605073928833,
"learning_rate": 7.87109080912554e-06,
"loss": 0.6346,
"step": 511
},
{
"epoch": 1.1269258987527513,
"grad_norm": 0.7789011001586914,
"learning_rate": 7.860583300610849e-06,
"loss": 0.6361,
"step": 512
},
{
"epoch": 1.1291269258987526,
"grad_norm": 0.8192563652992249,
"learning_rate": 7.850056978061966e-06,
"loss": 0.6112,
"step": 513
},
{
"epoch": 1.1313279530447542,
"grad_norm": 0.8304346799850464,
"learning_rate": 7.839511910710431e-06,
"loss": 0.6589,
"step": 514
},
{
"epoch": 1.1335289801907558,
"grad_norm": 0.785780131816864,
"learning_rate": 7.828948167911073e-06,
"loss": 0.6504,
"step": 515
},
{
"epoch": 1.135730007336757,
"grad_norm": 0.8830695748329163,
"learning_rate": 7.818365819141545e-06,
"loss": 0.6524,
"step": 516
},
{
"epoch": 1.1379310344827587,
"grad_norm": 0.796280026435852,
"learning_rate": 7.807764934001875e-06,
"loss": 0.6409,
"step": 517
},
{
"epoch": 1.14013206162876,
"grad_norm": 0.7574799060821533,
"learning_rate": 7.797145582213998e-06,
"loss": 0.6405,
"step": 518
},
{
"epoch": 1.1423330887747616,
"grad_norm": 0.8431347012519836,
"learning_rate": 7.786507833621314e-06,
"loss": 0.6774,
"step": 519
},
{
"epoch": 1.144534115920763,
"grad_norm": 0.8179629445075989,
"learning_rate": 7.775851758188209e-06,
"loss": 0.6543,
"step": 520
},
{
"epoch": 1.1467351430667645,
"grad_norm": 0.8065217137336731,
"learning_rate": 7.765177425999609e-06,
"loss": 0.6524,
"step": 521
},
{
"epoch": 1.148936170212766,
"grad_norm": 0.8660486340522766,
"learning_rate": 7.754484907260513e-06,
"loss": 0.6516,
"step": 522
},
{
"epoch": 1.1511371973587674,
"grad_norm": 0.7900784015655518,
"learning_rate": 7.743774272295538e-06,
"loss": 0.6296,
"step": 523
},
{
"epoch": 1.153338224504769,
"grad_norm": 0.837182879447937,
"learning_rate": 7.733045591548442e-06,
"loss": 0.6479,
"step": 524
},
{
"epoch": 1.1555392516507703,
"grad_norm": 0.8670573830604553,
"learning_rate": 7.72229893558168e-06,
"loss": 0.6477,
"step": 525
},
{
"epoch": 1.1577402787967719,
"grad_norm": 0.7950059771537781,
"learning_rate": 7.711534375075923e-06,
"loss": 0.655,
"step": 526
},
{
"epoch": 1.1599413059427732,
"grad_norm": 0.7337070107460022,
"learning_rate": 7.700751980829601e-06,
"loss": 0.6577,
"step": 527
},
{
"epoch": 1.1621423330887748,
"grad_norm": 0.8407183885574341,
"learning_rate": 7.689951823758439e-06,
"loss": 0.6174,
"step": 528
},
{
"epoch": 1.1643433602347761,
"grad_norm": 0.8000947833061218,
"learning_rate": 7.679133974894984e-06,
"loss": 0.6519,
"step": 529
},
{
"epoch": 1.1665443873807777,
"grad_norm": 0.8611817359924316,
"learning_rate": 7.668298505388146e-06,
"loss": 0.6642,
"step": 530
},
{
"epoch": 1.168745414526779,
"grad_norm": 0.8054713606834412,
"learning_rate": 7.657445486502723e-06,
"loss": 0.6672,
"step": 531
},
{
"epoch": 1.1709464416727806,
"grad_norm": 0.8271917104721069,
"learning_rate": 7.646574989618938e-06,
"loss": 0.6859,
"step": 532
},
{
"epoch": 1.1731474688187822,
"grad_norm": 0.7930768728256226,
"learning_rate": 7.635687086231962e-06,
"loss": 0.6469,
"step": 533
},
{
"epoch": 1.1753484959647835,
"grad_norm": 0.7981510758399963,
"learning_rate": 7.624781847951453e-06,
"loss": 0.6612,
"step": 534
},
{
"epoch": 1.177549523110785,
"grad_norm": 0.7277331352233887,
"learning_rate": 7.613859346501078e-06,
"loss": 0.6514,
"step": 535
},
{
"epoch": 1.1797505502567864,
"grad_norm": 0.7340840697288513,
"learning_rate": 7.602919653718044e-06,
"loss": 0.6683,
"step": 536
},
{
"epoch": 1.181951577402788,
"grad_norm": 0.7950836420059204,
"learning_rate": 7.591962841552627e-06,
"loss": 0.6617,
"step": 537
},
{
"epoch": 1.1841526045487893,
"grad_norm": 0.8152914643287659,
"learning_rate": 7.580988982067694e-06,
"loss": 0.6418,
"step": 538
},
{
"epoch": 1.186353631694791,
"grad_norm": 0.7889769673347473,
"learning_rate": 7.569998147438233e-06,
"loss": 0.6647,
"step": 539
},
{
"epoch": 1.1885546588407925,
"grad_norm": 0.74322909116745,
"learning_rate": 7.558990409950881e-06,
"loss": 0.6536,
"step": 540
},
{
"epoch": 1.1907556859867938,
"grad_norm": 0.8705032467842102,
"learning_rate": 7.5479658420034415e-06,
"loss": 0.6671,
"step": 541
},
{
"epoch": 1.1929567131327954,
"grad_norm": 0.7831534743309021,
"learning_rate": 7.536924516104411e-06,
"loss": 0.6732,
"step": 542
},
{
"epoch": 1.1951577402787967,
"grad_norm": 0.7445375919342041,
"learning_rate": 7.5258665048725065e-06,
"loss": 0.6229,
"step": 543
},
{
"epoch": 1.1973587674247983,
"grad_norm": 0.7821725606918335,
"learning_rate": 7.514791881036179e-06,
"loss": 0.6469,
"step": 544
},
{
"epoch": 1.1995597945707996,
"grad_norm": 0.810231626033783,
"learning_rate": 7.503700717433145e-06,
"loss": 0.6466,
"step": 545
},
{
"epoch": 1.2017608217168012,
"grad_norm": 0.746338427066803,
"learning_rate": 7.492593087009903e-06,
"loss": 0.6419,
"step": 546
},
{
"epoch": 1.2039618488628026,
"grad_norm": 0.7336586117744446,
"learning_rate": 7.481469062821252e-06,
"loss": 0.6417,
"step": 547
},
{
"epoch": 1.2061628760088041,
"grad_norm": 0.8493391871452332,
"learning_rate": 7.470328718029809e-06,
"loss": 0.6437,
"step": 548
},
{
"epoch": 1.2083639031548055,
"grad_norm": 0.7558215856552124,
"learning_rate": 7.45917212590554e-06,
"loss": 0.653,
"step": 549
},
{
"epoch": 1.210564930300807,
"grad_norm": 0.7480248212814331,
"learning_rate": 7.447999359825263e-06,
"loss": 0.6194,
"step": 550
},
{
"epoch": 1.2127659574468086,
"grad_norm": 0.7975335717201233,
"learning_rate": 7.436810493272174e-06,
"loss": 0.6665,
"step": 551
},
{
"epoch": 1.21496698459281,
"grad_norm": 0.7977692484855652,
"learning_rate": 7.42560559983536e-06,
"loss": 0.6561,
"step": 552
},
{
"epoch": 1.2171680117388115,
"grad_norm": 0.7404007315635681,
"learning_rate": 7.414384753209323e-06,
"loss": 0.6563,
"step": 553
},
{
"epoch": 1.2193690388848129,
"grad_norm": 0.7981791496276855,
"learning_rate": 7.403148027193479e-06,
"loss": 0.6744,
"step": 554
},
{
"epoch": 1.2215700660308144,
"grad_norm": 0.7709721922874451,
"learning_rate": 7.39189549569169e-06,
"loss": 0.6231,
"step": 555
},
{
"epoch": 1.2237710931768158,
"grad_norm": 0.7501435279846191,
"learning_rate": 7.380627232711769e-06,
"loss": 0.6291,
"step": 556
},
{
"epoch": 1.2259721203228173,
"grad_norm": 0.7685399651527405,
"learning_rate": 7.369343312364994e-06,
"loss": 0.6593,
"step": 557
},
{
"epoch": 1.228173147468819,
"grad_norm": 0.7429911494255066,
"learning_rate": 7.358043808865621e-06,
"loss": 0.6476,
"step": 558
},
{
"epoch": 1.2303741746148202,
"grad_norm": 0.7684638500213623,
"learning_rate": 7.346728796530398e-06,
"loss": 0.6658,
"step": 559
},
{
"epoch": 1.2325752017608218,
"grad_norm": 0.7467631697654724,
"learning_rate": 7.3353983497780725e-06,
"loss": 0.6323,
"step": 560
},
{
"epoch": 1.2347762289068231,
"grad_norm": 0.8210097551345825,
"learning_rate": 7.324052543128904e-06,
"loss": 0.6299,
"step": 561
},
{
"epoch": 1.2369772560528247,
"grad_norm": 0.7603449821472168,
"learning_rate": 7.312691451204178e-06,
"loss": 0.6746,
"step": 562
},
{
"epoch": 1.239178283198826,
"grad_norm": 0.6765388250350952,
"learning_rate": 7.301315148725704e-06,
"loss": 0.6537,
"step": 563
},
{
"epoch": 1.2413793103448276,
"grad_norm": 0.7473892569541931,
"learning_rate": 7.289923710515338e-06,
"loss": 0.6734,
"step": 564
},
{
"epoch": 1.243580337490829,
"grad_norm": 0.8171759843826294,
"learning_rate": 7.278517211494481e-06,
"loss": 0.6282,
"step": 565
},
{
"epoch": 1.2457813646368305,
"grad_norm": 0.7920976877212524,
"learning_rate": 7.267095726683587e-06,
"loss": 0.6467,
"step": 566
},
{
"epoch": 1.2479823917828319,
"grad_norm": 0.7972446084022522,
"learning_rate": 7.255659331201673e-06,
"loss": 0.6265,
"step": 567
},
{
"epoch": 1.2501834189288334,
"grad_norm": 0.7882792949676514,
"learning_rate": 7.244208100265826e-06,
"loss": 0.6783,
"step": 568
},
{
"epoch": 1.252384446074835,
"grad_norm": 0.830058217048645,
"learning_rate": 7.2327421091907006e-06,
"loss": 0.6625,
"step": 569
},
{
"epoch": 1.2545854732208364,
"grad_norm": 0.750026285648346,
"learning_rate": 7.221261433388032e-06,
"loss": 0.6466,
"step": 570
},
{
"epoch": 1.256786500366838,
"grad_norm": 0.7765890955924988,
"learning_rate": 7.2097661483661355e-06,
"loss": 0.6558,
"step": 571
},
{
"epoch": 1.2589875275128393,
"grad_norm": 0.7946206331253052,
"learning_rate": 7.198256329729412e-06,
"loss": 0.6241,
"step": 572
},
{
"epoch": 1.2611885546588408,
"grad_norm": 0.805760383605957,
"learning_rate": 7.186732053177848e-06,
"loss": 0.6465,
"step": 573
},
{
"epoch": 1.2633895818048422,
"grad_norm": 0.7542381286621094,
"learning_rate": 7.175193394506523e-06,
"loss": 0.6451,
"step": 574
},
{
"epoch": 1.2655906089508437,
"grad_norm": 0.7699628472328186,
"learning_rate": 7.163640429605106e-06,
"loss": 0.631,
"step": 575
},
{
"epoch": 1.2677916360968453,
"grad_norm": 0.7826363444328308,
"learning_rate": 7.152073234457358e-06,
"loss": 0.6435,
"step": 576
},
{
"epoch": 1.2699926632428467,
"grad_norm": 0.8631653785705566,
"learning_rate": 7.140491885140629e-06,
"loss": 0.6573,
"step": 577
},
{
"epoch": 1.272193690388848,
"grad_norm": 0.7720036506652832,
"learning_rate": 7.128896457825364e-06,
"loss": 0.6589,
"step": 578
},
{
"epoch": 1.2743947175348496,
"grad_norm": 0.7530826330184937,
"learning_rate": 7.117287028774601e-06,
"loss": 0.6613,
"step": 579
},
{
"epoch": 1.2765957446808511,
"grad_norm": 0.7781434059143066,
"learning_rate": 7.105663674343462e-06,
"loss": 0.6505,
"step": 580
},
{
"epoch": 1.2787967718268525,
"grad_norm": 0.8403761386871338,
"learning_rate": 7.094026470978663e-06,
"loss": 0.6522,
"step": 581
},
{
"epoch": 1.280997798972854,
"grad_norm": 0.7812175154685974,
"learning_rate": 7.082375495217996e-06,
"loss": 0.6501,
"step": 582
},
{
"epoch": 1.2831988261188554,
"grad_norm": 0.783920407295227,
"learning_rate": 7.070710823689841e-06,
"loss": 0.6261,
"step": 583
},
{
"epoch": 1.285399853264857,
"grad_norm": 0.8044145107269287,
"learning_rate": 7.059032533112652e-06,
"loss": 0.6422,
"step": 584
},
{
"epoch": 1.2876008804108583,
"grad_norm": 0.849455714225769,
"learning_rate": 7.047340700294454e-06,
"loss": 0.6498,
"step": 585
},
{
"epoch": 1.2898019075568599,
"grad_norm": 0.8246210813522339,
"learning_rate": 7.03563540213234e-06,
"loss": 0.6118,
"step": 586
},
{
"epoch": 1.2920029347028614,
"grad_norm": 0.7495859265327454,
"learning_rate": 7.023916715611969e-06,
"loss": 0.6498,
"step": 587
},
{
"epoch": 1.2942039618488628,
"grad_norm": 0.7481572031974792,
"learning_rate": 7.012184717807051e-06,
"loss": 0.666,
"step": 588
},
{
"epoch": 1.2964049889948643,
"grad_norm": 0.7745373249053955,
"learning_rate": 7.000439485878841e-06,
"loss": 0.6418,
"step": 589
},
{
"epoch": 1.2986060161408657,
"grad_norm": 0.7574506998062134,
"learning_rate": 6.98868109707564e-06,
"loss": 0.6288,
"step": 590
},
{
"epoch": 1.3008070432868672,
"grad_norm": 0.7750707268714905,
"learning_rate": 6.97690962873228e-06,
"loss": 0.6676,
"step": 591
},
{
"epoch": 1.3030080704328686,
"grad_norm": 0.7187142968177795,
"learning_rate": 6.965125158269619e-06,
"loss": 0.6451,
"step": 592
},
{
"epoch": 1.3052090975788702,
"grad_norm": 0.7737485766410828,
"learning_rate": 6.953327763194026e-06,
"loss": 0.648,
"step": 593
},
{
"epoch": 1.3074101247248717,
"grad_norm": 0.776098906993866,
"learning_rate": 6.941517521096877e-06,
"loss": 0.6352,
"step": 594
},
{
"epoch": 1.309611151870873,
"grad_norm": 0.7563000917434692,
"learning_rate": 6.929694509654043e-06,
"loss": 0.6578,
"step": 595
},
{
"epoch": 1.3118121790168744,
"grad_norm": 0.7544032335281372,
"learning_rate": 6.917858806625377e-06,
"loss": 0.659,
"step": 596
},
{
"epoch": 1.314013206162876,
"grad_norm": 0.7261113524436951,
"learning_rate": 6.906010489854209e-06,
"loss": 0.6745,
"step": 597
},
{
"epoch": 1.3162142333088775,
"grad_norm": 0.788115918636322,
"learning_rate": 6.894149637266825e-06,
"loss": 0.6666,
"step": 598
},
{
"epoch": 1.318415260454879,
"grad_norm": 0.7851533889770508,
"learning_rate": 6.88227632687196e-06,
"loss": 0.6699,
"step": 599
},
{
"epoch": 1.3206162876008805,
"grad_norm": 0.7666875720024109,
"learning_rate": 6.870390636760286e-06,
"loss": 0.6618,
"step": 600
},
{
"epoch": 1.3228173147468818,
"grad_norm": 0.7792441248893738,
"learning_rate": 6.858492645103894e-06,
"loss": 0.6515,
"step": 601
},
{
"epoch": 1.3250183418928834,
"grad_norm": 0.8469759821891785,
"learning_rate": 6.846582430155783e-06,
"loss": 0.6842,
"step": 602
},
{
"epoch": 1.3272193690388847,
"grad_norm": 0.7582294940948486,
"learning_rate": 6.834660070249343e-06,
"loss": 0.6908,
"step": 603
},
{
"epoch": 1.3294203961848863,
"grad_norm": 0.7895419001579285,
"learning_rate": 6.822725643797844e-06,
"loss": 0.6774,
"step": 604
},
{
"epoch": 1.3316214233308878,
"grad_norm": 0.7758541107177734,
"learning_rate": 6.810779229293917e-06,
"loss": 0.6491,
"step": 605
},
{
"epoch": 1.3338224504768892,
"grad_norm": 0.7165260910987854,
"learning_rate": 6.798820905309036e-06,
"loss": 0.6382,
"step": 606
},
{
"epoch": 1.3360234776228908,
"grad_norm": 0.755100429058075,
"learning_rate": 6.786850750493006e-06,
"loss": 0.6576,
"step": 607
},
{
"epoch": 1.338224504768892,
"grad_norm": 0.7716681361198425,
"learning_rate": 6.774868843573441e-06,
"loss": 0.6564,
"step": 608
},
{
"epoch": 1.3404255319148937,
"grad_norm": 0.7416461110115051,
"learning_rate": 6.7628752633552505e-06,
"loss": 0.6221,
"step": 609
},
{
"epoch": 1.342626559060895,
"grad_norm": 0.7217958569526672,
"learning_rate": 6.750870088720122e-06,
"loss": 0.6373,
"step": 610
},
{
"epoch": 1.3448275862068966,
"grad_norm": 0.7555177807807922,
"learning_rate": 6.738853398625993e-06,
"loss": 0.662,
"step": 611
},
{
"epoch": 1.3470286133528981,
"grad_norm": 0.7566019296646118,
"learning_rate": 6.726825272106539e-06,
"loss": 0.6576,
"step": 612
},
{
"epoch": 1.3492296404988995,
"grad_norm": 0.7588501572608948,
"learning_rate": 6.714785788270658e-06,
"loss": 0.6238,
"step": 613
},
{
"epoch": 1.3514306676449008,
"grad_norm": 0.7500647902488708,
"learning_rate": 6.702735026301942e-06,
"loss": 0.6491,
"step": 614
},
{
"epoch": 1.3536316947909024,
"grad_norm": 0.8111644387245178,
"learning_rate": 6.690673065458158e-06,
"loss": 0.6673,
"step": 615
},
{
"epoch": 1.355832721936904,
"grad_norm": 0.7274529337882996,
"learning_rate": 6.678599985070728e-06,
"loss": 0.6199,
"step": 616
},
{
"epoch": 1.3580337490829053,
"grad_norm": 0.7605862617492676,
"learning_rate": 6.66651586454421e-06,
"loss": 0.6853,
"step": 617
},
{
"epoch": 1.3602347762289069,
"grad_norm": 0.8395547866821289,
"learning_rate": 6.65442078335577e-06,
"loss": 0.6519,
"step": 618
},
{
"epoch": 1.3624358033749082,
"grad_norm": 0.8221648931503296,
"learning_rate": 6.642314821054663e-06,
"loss": 0.6464,
"step": 619
},
{
"epoch": 1.3646368305209098,
"grad_norm": 0.7577537298202515,
"learning_rate": 6.63019805726171e-06,
"loss": 0.6338,
"step": 620
},
{
"epoch": 1.3668378576669111,
"grad_norm": 0.7528075575828552,
"learning_rate": 6.618070571668771e-06,
"loss": 0.6452,
"step": 621
},
{
"epoch": 1.3690388848129127,
"grad_norm": 0.7857430577278137,
"learning_rate": 6.605932444038229e-06,
"loss": 0.6556,
"step": 622
},
{
"epoch": 1.3712399119589143,
"grad_norm": 0.7965130805969238,
"learning_rate": 6.59378375420245e-06,
"loss": 0.6705,
"step": 623
},
{
"epoch": 1.3734409391049156,
"grad_norm": 0.7695780396461487,
"learning_rate": 6.5816245820632745e-06,
"loss": 0.6809,
"step": 624
},
{
"epoch": 1.3756419662509172,
"grad_norm": 0.7157763242721558,
"learning_rate": 6.569455007591485e-06,
"loss": 0.6331,
"step": 625
},
{
"epoch": 1.3778429933969185,
"grad_norm": 0.76363605260849,
"learning_rate": 6.557275110826277e-06,
"loss": 0.6736,
"step": 626
},
{
"epoch": 1.38004402054292,
"grad_norm": 0.7223920822143555,
"learning_rate": 6.545084971874738e-06,
"loss": 0.6675,
"step": 627
},
{
"epoch": 1.3822450476889214,
"grad_norm": 0.7648452520370483,
"learning_rate": 6.532884670911317e-06,
"loss": 0.6573,
"step": 628
},
{
"epoch": 1.384446074834923,
"grad_norm": 0.7381333112716675,
"learning_rate": 6.5206742881772975e-06,
"loss": 0.6428,
"step": 629
},
{
"epoch": 1.3866471019809246,
"grad_norm": 0.6984859704971313,
"learning_rate": 6.508453903980275e-06,
"loss": 0.6297,
"step": 630
},
{
"epoch": 1.388848129126926,
"grad_norm": 0.7828774452209473,
"learning_rate": 6.496223598693619e-06,
"loss": 0.6232,
"step": 631
},
{
"epoch": 1.3910491562729272,
"grad_norm": 0.7228440046310425,
"learning_rate": 6.483983452755953e-06,
"loss": 0.6475,
"step": 632
},
{
"epoch": 1.3932501834189288,
"grad_norm": 0.7507853507995605,
"learning_rate": 6.471733546670624e-06,
"loss": 0.6511,
"step": 633
},
{
"epoch": 1.3954512105649304,
"grad_norm": 0.7256352305412292,
"learning_rate": 6.459473961005168e-06,
"loss": 0.6554,
"step": 634
},
{
"epoch": 1.3976522377109317,
"grad_norm": 0.777930736541748,
"learning_rate": 6.447204776390783e-06,
"loss": 0.621,
"step": 635
},
{
"epoch": 1.3998532648569333,
"grad_norm": 0.770029604434967,
"learning_rate": 6.434926073521804e-06,
"loss": 0.6353,
"step": 636
},
{
"epoch": 1.4020542920029346,
"grad_norm": 0.7469916939735413,
"learning_rate": 6.4226379331551625e-06,
"loss": 0.6484,
"step": 637
},
{
"epoch": 1.4042553191489362,
"grad_norm": 0.7436196208000183,
"learning_rate": 6.410340436109864e-06,
"loss": 0.6522,
"step": 638
},
{
"epoch": 1.4064563462949375,
"grad_norm": 0.7931221723556519,
"learning_rate": 6.3980336632664505e-06,
"loss": 0.6252,
"step": 639
},
{
"epoch": 1.408657373440939,
"grad_norm": 0.7340280413627625,
"learning_rate": 6.385717695566472e-06,
"loss": 0.6305,
"step": 640
},
{
"epoch": 1.4108584005869407,
"grad_norm": 0.7154629826545715,
"learning_rate": 6.373392614011952e-06,
"loss": 0.6417,
"step": 641
},
{
"epoch": 1.413059427732942,
"grad_norm": 0.7936920523643494,
"learning_rate": 6.361058499664856e-06,
"loss": 0.6538,
"step": 642
},
{
"epoch": 1.4152604548789436,
"grad_norm": 0.7220768332481384,
"learning_rate": 6.348715433646559e-06,
"loss": 0.6311,
"step": 643
},
{
"epoch": 1.417461482024945,
"grad_norm": 0.7281099557876587,
"learning_rate": 6.336363497137311e-06,
"loss": 0.6602,
"step": 644
},
{
"epoch": 1.4196625091709465,
"grad_norm": 0.7421095371246338,
"learning_rate": 6.324002771375704e-06,
"loss": 0.6363,
"step": 645
},
{
"epoch": 1.4218635363169478,
"grad_norm": 0.7735316157341003,
"learning_rate": 6.311633337658132e-06,
"loss": 0.6603,
"step": 646
},
{
"epoch": 1.4240645634629494,
"grad_norm": 0.7439554333686829,
"learning_rate": 6.299255277338265e-06,
"loss": 0.6327,
"step": 647
},
{
"epoch": 1.426265590608951,
"grad_norm": 0.7588260769844055,
"learning_rate": 6.286868671826513e-06,
"loss": 0.6337,
"step": 648
},
{
"epoch": 1.4284666177549523,
"grad_norm": 0.7600772976875305,
"learning_rate": 6.274473602589481e-06,
"loss": 0.6826,
"step": 649
},
{
"epoch": 1.4306676449009537,
"grad_norm": 0.8217368721961975,
"learning_rate": 6.262070151149447e-06,
"loss": 0.6276,
"step": 650
},
{
"epoch": 1.4328686720469552,
"grad_norm": 0.7889801263809204,
"learning_rate": 6.249658399083811e-06,
"loss": 0.6367,
"step": 651
},
{
"epoch": 1.4350696991929568,
"grad_norm": 0.7394490242004395,
"learning_rate": 6.237238428024573e-06,
"loss": 0.6473,
"step": 652
},
{
"epoch": 1.4372707263389581,
"grad_norm": 0.7405373454093933,
"learning_rate": 6.2248103196577846e-06,
"loss": 0.6462,
"step": 653
},
{
"epoch": 1.4394717534849597,
"grad_norm": 0.8104057908058167,
"learning_rate": 6.21237415572302e-06,
"loss": 0.693,
"step": 654
},
{
"epoch": 1.441672780630961,
"grad_norm": 0.8070923686027527,
"learning_rate": 6.19993001801283e-06,
"loss": 0.6472,
"step": 655
},
{
"epoch": 1.4438738077769626,
"grad_norm": 0.776543378829956,
"learning_rate": 6.187477988372216e-06,
"loss": 0.6505,
"step": 656
},
{
"epoch": 1.446074834922964,
"grad_norm": 0.7469165325164795,
"learning_rate": 6.175018148698077e-06,
"loss": 0.6512,
"step": 657
},
{
"epoch": 1.4482758620689655,
"grad_norm": 0.7217493653297424,
"learning_rate": 6.162550580938682e-06,
"loss": 0.642,
"step": 658
},
{
"epoch": 1.450476889214967,
"grad_norm": 0.7939320802688599,
"learning_rate": 6.150075367093129e-06,
"loss": 0.6593,
"step": 659
},
{
"epoch": 1.4526779163609684,
"grad_norm": 0.8231311440467834,
"learning_rate": 6.137592589210803e-06,
"loss": 0.6525,
"step": 660
},
{
"epoch": 1.45487894350697,
"grad_norm": 0.7811779975891113,
"learning_rate": 6.125102329390837e-06,
"loss": 0.659,
"step": 661
},
{
"epoch": 1.4570799706529713,
"grad_norm": 0.7568084597587585,
"learning_rate": 6.112604669781572e-06,
"loss": 0.6445,
"step": 662
},
{
"epoch": 1.459280997798973,
"grad_norm": 0.7805162668228149,
"learning_rate": 6.100099692580021e-06,
"loss": 0.6563,
"step": 663
},
{
"epoch": 1.4614820249449743,
"grad_norm": 0.7505598068237305,
"learning_rate": 6.0875874800313185e-06,
"loss": 0.6474,
"step": 664
},
{
"epoch": 1.4636830520909758,
"grad_norm": 0.7177911996841431,
"learning_rate": 6.075068114428191e-06,
"loss": 0.6548,
"step": 665
},
{
"epoch": 1.4658840792369774,
"grad_norm": 0.6932059526443481,
"learning_rate": 6.062541678110409e-06,
"loss": 0.6319,
"step": 666
},
{
"epoch": 1.4680851063829787,
"grad_norm": 0.7692784070968628,
"learning_rate": 6.050008253464247e-06,
"loss": 0.6418,
"step": 667
},
{
"epoch": 1.47028613352898,
"grad_norm": 0.7305737137794495,
"learning_rate": 6.037467922921943e-06,
"loss": 0.6505,
"step": 668
},
{
"epoch": 1.4724871606749816,
"grad_norm": 0.7683250308036804,
"learning_rate": 6.024920768961153e-06,
"loss": 0.6124,
"step": 669
},
{
"epoch": 1.4746881878209832,
"grad_norm": 0.7519153356552124,
"learning_rate": 6.01236687410441e-06,
"loss": 0.647,
"step": 670
},
{
"epoch": 1.4768892149669846,
"grad_norm": 0.7561257481575012,
"learning_rate": 5.999806320918584e-06,
"loss": 0.6511,
"step": 671
},
{
"epoch": 1.4790902421129861,
"grad_norm": 0.7363054752349854,
"learning_rate": 5.987239192014336e-06,
"loss": 0.6643,
"step": 672
},
{
"epoch": 1.4812912692589875,
"grad_norm": 0.7805913686752319,
"learning_rate": 5.974665570045577e-06,
"loss": 0.6525,
"step": 673
},
{
"epoch": 1.483492296404989,
"grad_norm": 0.7351367473602295,
"learning_rate": 5.962085537708918e-06,
"loss": 0.6492,
"step": 674
},
{
"epoch": 1.4856933235509904,
"grad_norm": 0.7676333785057068,
"learning_rate": 5.949499177743137e-06,
"loss": 0.6727,
"step": 675
},
{
"epoch": 1.487894350696992,
"grad_norm": 0.7225351333618164,
"learning_rate": 5.936906572928625e-06,
"loss": 0.659,
"step": 676
},
{
"epoch": 1.4900953778429935,
"grad_norm": 0.8179192543029785,
"learning_rate": 5.9243078060868445e-06,
"loss": 0.6711,
"step": 677
},
{
"epoch": 1.4922964049889949,
"grad_norm": 0.7187142372131348,
"learning_rate": 5.911702960079788e-06,
"loss": 0.6649,
"step": 678
},
{
"epoch": 1.4944974321349962,
"grad_norm": 0.7550854682922363,
"learning_rate": 5.899092117809434e-06,
"loss": 0.6473,
"step": 679
},
{
"epoch": 1.4966984592809978,
"grad_norm": 0.7542235851287842,
"learning_rate": 5.88647536221719e-06,
"loss": 0.6668,
"step": 680
},
{
"epoch": 1.4988994864269993,
"grad_norm": 0.6938273906707764,
"learning_rate": 5.87385277628336e-06,
"loss": 0.6398,
"step": 681
},
{
"epoch": 1.5011005135730007,
"grad_norm": 0.7257410883903503,
"learning_rate": 5.861224443026595e-06,
"loss": 0.6418,
"step": 682
},
{
"epoch": 1.5033015407190022,
"grad_norm": 0.7318007946014404,
"learning_rate": 5.848590445503345e-06,
"loss": 0.6552,
"step": 683
},
{
"epoch": 1.5055025678650038,
"grad_norm": 0.7129063606262207,
"learning_rate": 5.835950866807314e-06,
"loss": 0.6504,
"step": 684
},
{
"epoch": 1.5077035950110051,
"grad_norm": 0.7286855578422546,
"learning_rate": 5.823305790068912e-06,
"loss": 0.6512,
"step": 685
},
{
"epoch": 1.5099046221570065,
"grad_norm": 0.7786821126937866,
"learning_rate": 5.810655298454711e-06,
"loss": 0.6714,
"step": 686
},
{
"epoch": 1.512105649303008,
"grad_norm": 0.7133433222770691,
"learning_rate": 5.797999475166897e-06,
"loss": 0.6477,
"step": 687
},
{
"epoch": 1.5143066764490096,
"grad_norm": 0.7769381999969482,
"learning_rate": 5.78533840344272e-06,
"loss": 0.6212,
"step": 688
},
{
"epoch": 1.516507703595011,
"grad_norm": 0.77671217918396,
"learning_rate": 5.772672166553952e-06,
"loss": 0.6575,
"step": 689
},
{
"epoch": 1.5187087307410123,
"grad_norm": 0.7859999537467957,
"learning_rate": 5.760000847806337e-06,
"loss": 0.6654,
"step": 690
},
{
"epoch": 1.520909757887014,
"grad_norm": 0.7406096458435059,
"learning_rate": 5.7473245305390355e-06,
"loss": 0.6378,
"step": 691
},
{
"epoch": 1.5231107850330154,
"grad_norm": 0.7268022894859314,
"learning_rate": 5.734643298124091e-06,
"loss": 0.6494,
"step": 692
},
{
"epoch": 1.5253118121790168,
"grad_norm": 0.7276173233985901,
"learning_rate": 5.721957233965868e-06,
"loss": 0.6227,
"step": 693
},
{
"epoch": 1.5275128393250184,
"grad_norm": 0.7826389074325562,
"learning_rate": 5.709266421500516e-06,
"loss": 0.6386,
"step": 694
},
{
"epoch": 1.52971386647102,
"grad_norm": 0.7818190455436707,
"learning_rate": 5.696570944195407e-06,
"loss": 0.624,
"step": 695
},
{
"epoch": 1.5319148936170213,
"grad_norm": 0.8185070157051086,
"learning_rate": 5.683870885548599e-06,
"loss": 0.6682,
"step": 696
},
{
"epoch": 1.5341159207630226,
"grad_norm": 0.7149841785430908,
"learning_rate": 5.671166329088278e-06,
"loss": 0.6433,
"step": 697
},
{
"epoch": 1.5363169479090242,
"grad_norm": 0.7746714949607849,
"learning_rate": 5.658457358372213e-06,
"loss": 0.6458,
"step": 698
},
{
"epoch": 1.5385179750550257,
"grad_norm": 0.7605146169662476,
"learning_rate": 5.645744056987208e-06,
"loss": 0.6455,
"step": 699
},
{
"epoch": 1.540719002201027,
"grad_norm": 0.8349155187606812,
"learning_rate": 5.6330265085485454e-06,
"loss": 0.6522,
"step": 700
},
{
"epoch": 1.5429200293470287,
"grad_norm": 0.8198363780975342,
"learning_rate": 5.620304796699443e-06,
"loss": 0.646,
"step": 701
},
{
"epoch": 1.5451210564930302,
"grad_norm": 0.7053084969520569,
"learning_rate": 5.6075790051105025e-06,
"loss": 0.6453,
"step": 702
},
{
"epoch": 1.5473220836390316,
"grad_norm": 0.770004153251648,
"learning_rate": 5.594849217479155e-06,
"loss": 0.6489,
"step": 703
},
{
"epoch": 1.549523110785033,
"grad_norm": 0.8321583867073059,
"learning_rate": 5.582115517529114e-06,
"loss": 0.674,
"step": 704
},
{
"epoch": 1.5517241379310345,
"grad_norm": 0.7596578598022461,
"learning_rate": 5.569377989009829e-06,
"loss": 0.631,
"step": 705
},
{
"epoch": 1.553925165077036,
"grad_norm": 0.7819406986236572,
"learning_rate": 5.5566367156959246e-06,
"loss": 0.6242,
"step": 706
},
{
"epoch": 1.5561261922230374,
"grad_norm": 0.7157669067382812,
"learning_rate": 5.543891781386655e-06,
"loss": 0.6464,
"step": 707
},
{
"epoch": 1.5583272193690387,
"grad_norm": 0.7040793895721436,
"learning_rate": 5.531143269905356e-06,
"loss": 0.6539,
"step": 708
},
{
"epoch": 1.5605282465150405,
"grad_norm": 0.7729962468147278,
"learning_rate": 5.518391265098888e-06,
"loss": 0.6382,
"step": 709
},
{
"epoch": 1.5627292736610419,
"grad_norm": 0.7409836053848267,
"learning_rate": 5.5056358508370884e-06,
"loss": 0.6422,
"step": 710
},
{
"epoch": 1.5649303008070432,
"grad_norm": 0.7658186554908752,
"learning_rate": 5.4928771110122185e-06,
"loss": 0.662,
"step": 711
},
{
"epoch": 1.5671313279530448,
"grad_norm": 0.7052081227302551,
"learning_rate": 5.480115129538409e-06,
"loss": 0.6325,
"step": 712
},
{
"epoch": 1.5693323550990463,
"grad_norm": 0.7148929834365845,
"learning_rate": 5.467349990351116e-06,
"loss": 0.6391,
"step": 713
},
{
"epoch": 1.5715333822450477,
"grad_norm": 0.7564287185668945,
"learning_rate": 5.454581777406559e-06,
"loss": 0.6527,
"step": 714
},
{
"epoch": 1.573734409391049,
"grad_norm": 0.7831137180328369,
"learning_rate": 5.441810574681175e-06,
"loss": 0.6342,
"step": 715
},
{
"epoch": 1.5759354365370506,
"grad_norm": 0.7543917894363403,
"learning_rate": 5.429036466171067e-06,
"loss": 0.6396,
"step": 716
},
{
"epoch": 1.5781364636830522,
"grad_norm": 0.7687458992004395,
"learning_rate": 5.4162595358914475e-06,
"loss": 0.6167,
"step": 717
},
{
"epoch": 1.5803374908290535,
"grad_norm": 0.8006405830383301,
"learning_rate": 5.403479867876087e-06,
"loss": 0.6443,
"step": 718
},
{
"epoch": 1.582538517975055,
"grad_norm": 0.6990972757339478,
"learning_rate": 5.390697546176763e-06,
"loss": 0.6041,
"step": 719
},
{
"epoch": 1.5847395451210566,
"grad_norm": 0.7406675219535828,
"learning_rate": 5.377912654862708e-06,
"loss": 0.6499,
"step": 720
},
{
"epoch": 1.586940572267058,
"grad_norm": 0.7433100938796997,
"learning_rate": 5.36512527802005e-06,
"loss": 0.6368,
"step": 721
},
{
"epoch": 1.5891415994130593,
"grad_norm": 0.719387412071228,
"learning_rate": 5.35233549975127e-06,
"loss": 0.6362,
"step": 722
},
{
"epoch": 1.591342626559061,
"grad_norm": 0.7260124683380127,
"learning_rate": 5.339543404174639e-06,
"loss": 0.6297,
"step": 723
},
{
"epoch": 1.5935436537050625,
"grad_norm": 0.6759703755378723,
"learning_rate": 5.326749075423672e-06,
"loss": 0.6407,
"step": 724
},
{
"epoch": 1.5957446808510638,
"grad_norm": 0.7436619400978088,
"learning_rate": 5.3139525976465675e-06,
"loss": 0.6516,
"step": 725
},
{
"epoch": 1.5979457079970651,
"grad_norm": 0.7830444574356079,
"learning_rate": 5.301154055005664e-06,
"loss": 0.6299,
"step": 726
},
{
"epoch": 1.6001467351430667,
"grad_norm": 0.734085738658905,
"learning_rate": 5.288353531676873e-06,
"loss": 0.6376,
"step": 727
},
{
"epoch": 1.6023477622890683,
"grad_norm": 0.7484534382820129,
"learning_rate": 5.2755511118491405e-06,
"loss": 0.6527,
"step": 728
},
{
"epoch": 1.6045487894350696,
"grad_norm": 0.7485799193382263,
"learning_rate": 5.262746879723882e-06,
"loss": 0.6475,
"step": 729
},
{
"epoch": 1.6067498165810712,
"grad_norm": 0.7345410585403442,
"learning_rate": 5.249940919514434e-06,
"loss": 0.6344,
"step": 730
},
{
"epoch": 1.6089508437270728,
"grad_norm": 0.7497902512550354,
"learning_rate": 5.237133315445493e-06,
"loss": 0.6325,
"step": 731
},
{
"epoch": 1.611151870873074,
"grad_norm": 0.7568886876106262,
"learning_rate": 5.224324151752575e-06,
"loss": 0.6361,
"step": 732
},
{
"epoch": 1.6133528980190754,
"grad_norm": 0.7870405912399292,
"learning_rate": 5.211513512681451e-06,
"loss": 0.638,
"step": 733
},
{
"epoch": 1.615553925165077,
"grad_norm": 0.7633904814720154,
"learning_rate": 5.198701482487594e-06,
"loss": 0.685,
"step": 734
},
{
"epoch": 1.6177549523110786,
"grad_norm": 0.702457070350647,
"learning_rate": 5.185888145435626e-06,
"loss": 0.6204,
"step": 735
},
{
"epoch": 1.61995597945708,
"grad_norm": 0.7276713848114014,
"learning_rate": 5.173073585798768e-06,
"loss": 0.6555,
"step": 736
},
{
"epoch": 1.6221570066030815,
"grad_norm": 0.7087460160255432,
"learning_rate": 5.160257887858278e-06,
"loss": 0.6476,
"step": 737
},
{
"epoch": 1.624358033749083,
"grad_norm": 0.7397599816322327,
"learning_rate": 5.1474411359029e-06,
"loss": 0.6608,
"step": 738
},
{
"epoch": 1.6265590608950844,
"grad_norm": 0.7691498398780823,
"learning_rate": 5.134623414228315e-06,
"loss": 0.6534,
"step": 739
},
{
"epoch": 1.6287600880410857,
"grad_norm": 0.6871787309646606,
"learning_rate": 5.121804807136578e-06,
"loss": 0.6076,
"step": 740
},
{
"epoch": 1.6309611151870873,
"grad_norm": 0.7464572191238403,
"learning_rate": 5.108985398935569e-06,
"loss": 0.6548,
"step": 741
},
{
"epoch": 1.6331621423330889,
"grad_norm": 0.7194012403488159,
"learning_rate": 5.0961652739384356e-06,
"loss": 0.6595,
"step": 742
},
{
"epoch": 1.6353631694790902,
"grad_norm": 0.729796290397644,
"learning_rate": 5.083344516463043e-06,
"loss": 0.6257,
"step": 743
},
{
"epoch": 1.6375641966250916,
"grad_norm": 0.7039955854415894,
"learning_rate": 5.07052321083141e-06,
"loss": 0.6557,
"step": 744
},
{
"epoch": 1.6397652237710931,
"grad_norm": 0.7107521295547485,
"learning_rate": 5.057701441369167e-06,
"loss": 0.612,
"step": 745
},
{
"epoch": 1.6419662509170947,
"grad_norm": 0.7310301661491394,
"learning_rate": 5.04487929240499e-06,
"loss": 0.666,
"step": 746
},
{
"epoch": 1.644167278063096,
"grad_norm": 0.7443963885307312,
"learning_rate": 5.032056848270056e-06,
"loss": 0.66,
"step": 747
},
{
"epoch": 1.6463683052090976,
"grad_norm": 0.7622290253639221,
"learning_rate": 5.01923419329748e-06,
"loss": 0.6561,
"step": 748
},
{
"epoch": 1.6485693323550992,
"grad_norm": 0.7683627009391785,
"learning_rate": 5.006411411821762e-06,
"loss": 0.6452,
"step": 749
},
{
"epoch": 1.6507703595011005,
"grad_norm": 0.727986216545105,
"learning_rate": 4.993588588178239e-06,
"loss": 0.6776,
"step": 750
},
{
"epoch": 1.6529713866471019,
"grad_norm": 0.7469594478607178,
"learning_rate": 4.980765806702522e-06,
"loss": 0.6416,
"step": 751
},
{
"epoch": 1.6551724137931034,
"grad_norm": 0.7999967932701111,
"learning_rate": 4.967943151729945e-06,
"loss": 0.6465,
"step": 752
},
{
"epoch": 1.657373440939105,
"grad_norm": 0.7616086006164551,
"learning_rate": 4.955120707595011e-06,
"loss": 0.6706,
"step": 753
},
{
"epoch": 1.6595744680851063,
"grad_norm": 0.7449992895126343,
"learning_rate": 4.942298558630834e-06,
"loss": 0.6468,
"step": 754
},
{
"epoch": 1.661775495231108,
"grad_norm": 0.7490622997283936,
"learning_rate": 4.9294767891685904e-06,
"loss": 0.6319,
"step": 755
},
{
"epoch": 1.6639765223771095,
"grad_norm": 0.7469477653503418,
"learning_rate": 4.91665548353696e-06,
"loss": 0.6738,
"step": 756
},
{
"epoch": 1.6661775495231108,
"grad_norm": 0.771722674369812,
"learning_rate": 4.903834726061565e-06,
"loss": 0.6245,
"step": 757
},
{
"epoch": 1.6683785766691122,
"grad_norm": 0.75638347864151,
"learning_rate": 4.891014601064432e-06,
"loss": 0.6602,
"step": 758
},
{
"epoch": 1.6705796038151137,
"grad_norm": 0.7275886535644531,
"learning_rate": 4.8781951928634235e-06,
"loss": 0.6492,
"step": 759
},
{
"epoch": 1.6727806309611153,
"grad_norm": 0.7729997038841248,
"learning_rate": 4.865376585771687e-06,
"loss": 0.63,
"step": 760
},
{
"epoch": 1.6749816581071166,
"grad_norm": 0.726089596748352,
"learning_rate": 4.852558864097101e-06,
"loss": 0.6197,
"step": 761
},
{
"epoch": 1.677182685253118,
"grad_norm": 0.7375982403755188,
"learning_rate": 4.839742112141725e-06,
"loss": 0.641,
"step": 762
},
{
"epoch": 1.6793837123991195,
"grad_norm": 0.769862174987793,
"learning_rate": 4.826926414201234e-06,
"loss": 0.6567,
"step": 763
},
{
"epoch": 1.681584739545121,
"grad_norm": 0.7552918791770935,
"learning_rate": 4.814111854564375e-06,
"loss": 0.6365,
"step": 764
},
{
"epoch": 1.6837857666911225,
"grad_norm": 0.6974634528160095,
"learning_rate": 4.801298517512408e-06,
"loss": 0.6361,
"step": 765
},
{
"epoch": 1.685986793837124,
"grad_norm": 0.7454755306243896,
"learning_rate": 4.7884864873185485e-06,
"loss": 0.6364,
"step": 766
},
{
"epoch": 1.6881878209831256,
"grad_norm": 0.7395055294036865,
"learning_rate": 4.775675848247427e-06,
"loss": 0.6347,
"step": 767
},
{
"epoch": 1.690388848129127,
"grad_norm": 0.7322304248809814,
"learning_rate": 4.762866684554509e-06,
"loss": 0.6377,
"step": 768
},
{
"epoch": 1.6925898752751283,
"grad_norm": 0.7651588916778564,
"learning_rate": 4.7500590804855695e-06,
"loss": 0.6378,
"step": 769
},
{
"epoch": 1.6947909024211298,
"grad_norm": 0.7306197881698608,
"learning_rate": 4.73725312027612e-06,
"loss": 0.6507,
"step": 770
},
{
"epoch": 1.6969919295671314,
"grad_norm": 0.7101766467094421,
"learning_rate": 4.724448888150861e-06,
"loss": 0.6389,
"step": 771
},
{
"epoch": 1.6991929567131328,
"grad_norm": 0.7274670004844666,
"learning_rate": 4.711646468323129e-06,
"loss": 0.6553,
"step": 772
},
{
"epoch": 1.7013939838591343,
"grad_norm": 0.7361597418785095,
"learning_rate": 4.698845944994338e-06,
"loss": 0.6421,
"step": 773
},
{
"epoch": 1.7035950110051359,
"grad_norm": 0.7855360507965088,
"learning_rate": 4.686047402353433e-06,
"loss": 0.6366,
"step": 774
},
{
"epoch": 1.7057960381511372,
"grad_norm": 0.6977729201316833,
"learning_rate": 4.67325092457633e-06,
"loss": 0.6644,
"step": 775
},
{
"epoch": 1.7079970652971386,
"grad_norm": 0.7415184378623962,
"learning_rate": 4.660456595825362e-06,
"loss": 0.6477,
"step": 776
},
{
"epoch": 1.7101980924431401,
"grad_norm": 0.7262795567512512,
"learning_rate": 4.64766450024873e-06,
"loss": 0.6399,
"step": 777
},
{
"epoch": 1.7123991195891417,
"grad_norm": 0.7221468687057495,
"learning_rate": 4.634874721979952e-06,
"loss": 0.6231,
"step": 778
},
{
"epoch": 1.714600146735143,
"grad_norm": 0.729002833366394,
"learning_rate": 4.622087345137295e-06,
"loss": 0.6457,
"step": 779
},
{
"epoch": 1.7168011738811444,
"grad_norm": 0.7427892684936523,
"learning_rate": 4.609302453823238e-06,
"loss": 0.6363,
"step": 780
},
{
"epoch": 1.719002201027146,
"grad_norm": 0.7627214789390564,
"learning_rate": 4.596520132123915e-06,
"loss": 0.6351,
"step": 781
},
{
"epoch": 1.7212032281731475,
"grad_norm": 0.7454394698143005,
"learning_rate": 4.583740464108554e-06,
"loss": 0.6578,
"step": 782
},
{
"epoch": 1.7234042553191489,
"grad_norm": 0.7448314428329468,
"learning_rate": 4.570963533828934e-06,
"loss": 0.6536,
"step": 783
},
{
"epoch": 1.7256052824651504,
"grad_norm": 0.7190151810646057,
"learning_rate": 4.558189425318826e-06,
"loss": 0.6521,
"step": 784
},
{
"epoch": 1.727806309611152,
"grad_norm": 0.7045521140098572,
"learning_rate": 4.545418222593442e-06,
"loss": 0.6342,
"step": 785
},
{
"epoch": 1.7300073367571533,
"grad_norm": 0.7583068609237671,
"learning_rate": 4.532650009648885e-06,
"loss": 0.6105,
"step": 786
},
{
"epoch": 1.7322083639031547,
"grad_norm": 0.7634114623069763,
"learning_rate": 4.5198848704615915e-06,
"loss": 0.6721,
"step": 787
},
{
"epoch": 1.7344093910491563,
"grad_norm": 0.7080186009407043,
"learning_rate": 4.507122888987782e-06,
"loss": 0.6376,
"step": 788
},
{
"epoch": 1.7366104181951578,
"grad_norm": 0.7349112033843994,
"learning_rate": 4.4943641491629115e-06,
"loss": 0.6506,
"step": 789
},
{
"epoch": 1.7388114453411592,
"grad_norm": 0.7727476954460144,
"learning_rate": 4.481608734901114e-06,
"loss": 0.6467,
"step": 790
},
{
"epoch": 1.7410124724871605,
"grad_norm": 0.7642368078231812,
"learning_rate": 4.468856730094646e-06,
"loss": 0.6584,
"step": 791
},
{
"epoch": 1.7432134996331623,
"grad_norm": 0.7354398965835571,
"learning_rate": 4.456108218613346e-06,
"loss": 0.6464,
"step": 792
},
{
"epoch": 1.7454145267791636,
"grad_norm": 0.7356769442558289,
"learning_rate": 4.443363284304077e-06,
"loss": 0.6386,
"step": 793
},
{
"epoch": 1.747615553925165,
"grad_norm": 0.7684168219566345,
"learning_rate": 4.430622010990172e-06,
"loss": 0.6697,
"step": 794
},
{
"epoch": 1.7498165810711666,
"grad_norm": 0.7933084964752197,
"learning_rate": 4.417884482470887e-06,
"loss": 0.6625,
"step": 795
},
{
"epoch": 1.7520176082171681,
"grad_norm": 0.7346345782279968,
"learning_rate": 4.405150782520846e-06,
"loss": 0.6642,
"step": 796
},
{
"epoch": 1.7542186353631695,
"grad_norm": 0.815883219242096,
"learning_rate": 4.392420994889498e-06,
"loss": 0.6659,
"step": 797
},
{
"epoch": 1.7564196625091708,
"grad_norm": 0.7578274607658386,
"learning_rate": 4.379695203300558e-06,
"loss": 0.6793,
"step": 798
},
{
"epoch": 1.7586206896551724,
"grad_norm": 0.8254396915435791,
"learning_rate": 4.366973491451456e-06,
"loss": 0.6421,
"step": 799
},
{
"epoch": 1.760821716801174,
"grad_norm": 0.7950279116630554,
"learning_rate": 4.354255943012793e-06,
"loss": 0.6069,
"step": 800
},
{
"epoch": 1.7630227439471753,
"grad_norm": 0.767777681350708,
"learning_rate": 4.3415426416277885e-06,
"loss": 0.6361,
"step": 801
},
{
"epoch": 1.7652237710931769,
"grad_norm": 0.8324073553085327,
"learning_rate": 4.3288336709117246e-06,
"loss": 0.6548,
"step": 802
},
{
"epoch": 1.7674247982391784,
"grad_norm": 0.7362907528877258,
"learning_rate": 4.316129114451402e-06,
"loss": 0.645,
"step": 803
},
{
"epoch": 1.7696258253851798,
"grad_norm": 0.7246103882789612,
"learning_rate": 4.303429055804594e-06,
"loss": 0.6435,
"step": 804
},
{
"epoch": 1.771826852531181,
"grad_norm": 0.739098846912384,
"learning_rate": 4.290733578499486e-06,
"loss": 0.646,
"step": 805
},
{
"epoch": 1.7740278796771827,
"grad_norm": 0.7254499793052673,
"learning_rate": 4.278042766034134e-06,
"loss": 0.6287,
"step": 806
},
{
"epoch": 1.7762289068231842,
"grad_norm": 0.7469090223312378,
"learning_rate": 4.265356701875911e-06,
"loss": 0.6404,
"step": 807
},
{
"epoch": 1.7784299339691856,
"grad_norm": 0.7500411868095398,
"learning_rate": 4.252675469460965e-06,
"loss": 0.6488,
"step": 808
},
{
"epoch": 1.780630961115187,
"grad_norm": 0.7752728462219238,
"learning_rate": 4.239999152193664e-06,
"loss": 0.6489,
"step": 809
},
{
"epoch": 1.7828319882611887,
"grad_norm": 0.7430712580680847,
"learning_rate": 4.227327833446047e-06,
"loss": 0.6353,
"step": 810
},
{
"epoch": 1.78503301540719,
"grad_norm": 0.8414586782455444,
"learning_rate": 4.2146615965572804e-06,
"loss": 0.6358,
"step": 811
},
{
"epoch": 1.7872340425531914,
"grad_norm": 0.6868074536323547,
"learning_rate": 4.2020005248331056e-06,
"loss": 0.6241,
"step": 812
},
{
"epoch": 1.789435069699193,
"grad_norm": 0.7421786189079285,
"learning_rate": 4.189344701545291e-06,
"loss": 0.6406,
"step": 813
},
{
"epoch": 1.7916360968451945,
"grad_norm": 0.6971707344055176,
"learning_rate": 4.176694209931089e-06,
"loss": 0.6476,
"step": 814
},
{
"epoch": 1.7938371239911959,
"grad_norm": 0.721737265586853,
"learning_rate": 4.164049133192688e-06,
"loss": 0.603,
"step": 815
},
{
"epoch": 1.7960381511371972,
"grad_norm": 0.7487144470214844,
"learning_rate": 4.1514095544966556e-06,
"loss": 0.6712,
"step": 816
},
{
"epoch": 1.7982391782831988,
"grad_norm": 0.7321934103965759,
"learning_rate": 4.138775556973406e-06,
"loss": 0.6529,
"step": 817
},
{
"epoch": 1.8004402054292004,
"grad_norm": 0.731799304485321,
"learning_rate": 4.126147223716642e-06,
"loss": 0.6177,
"step": 818
},
{
"epoch": 1.8026412325752017,
"grad_norm": 0.7061100006103516,
"learning_rate": 4.113524637782812e-06,
"loss": 0.6475,
"step": 819
},
{
"epoch": 1.8048422597212033,
"grad_norm": 0.745857298374176,
"learning_rate": 4.100907882190567e-06,
"loss": 0.6523,
"step": 820
},
{
"epoch": 1.8070432868672048,
"grad_norm": 0.7384964823722839,
"learning_rate": 4.088297039920212e-06,
"loss": 0.6481,
"step": 821
},
{
"epoch": 1.8092443140132062,
"grad_norm": 0.7227874398231506,
"learning_rate": 4.075692193913156e-06,
"loss": 0.6517,
"step": 822
},
{
"epoch": 1.8114453411592075,
"grad_norm": 0.7166897058486938,
"learning_rate": 4.063093427071376e-06,
"loss": 0.6578,
"step": 823
},
{
"epoch": 1.813646368305209,
"grad_norm": 0.7427800893783569,
"learning_rate": 4.0505008222568655e-06,
"loss": 0.6003,
"step": 824
},
{
"epoch": 1.8158473954512107,
"grad_norm": 0.7219855189323425,
"learning_rate": 4.037914462291085e-06,
"loss": 0.6453,
"step": 825
},
{
"epoch": 1.818048422597212,
"grad_norm": 0.7622770071029663,
"learning_rate": 4.025334429954425e-06,
"loss": 0.6417,
"step": 826
},
{
"epoch": 1.8202494497432133,
"grad_norm": 0.7519661784172058,
"learning_rate": 4.012760807985665e-06,
"loss": 0.6631,
"step": 827
},
{
"epoch": 1.8224504768892151,
"grad_norm": 0.719353199005127,
"learning_rate": 4.0001936790814175e-06,
"loss": 0.6247,
"step": 828
},
{
"epoch": 1.8246515040352165,
"grad_norm": 0.7299590706825256,
"learning_rate": 3.987633125895593e-06,
"loss": 0.6662,
"step": 829
},
{
"epoch": 1.8268525311812178,
"grad_norm": 0.7449058294296265,
"learning_rate": 3.975079231038848e-06,
"loss": 0.6687,
"step": 830
},
{
"epoch": 1.8290535583272194,
"grad_norm": 0.7098851799964905,
"learning_rate": 3.962532077078058e-06,
"loss": 0.6541,
"step": 831
},
{
"epoch": 1.831254585473221,
"grad_norm": 0.7212103009223938,
"learning_rate": 3.949991746535753e-06,
"loss": 0.6348,
"step": 832
},
{
"epoch": 1.8334556126192223,
"grad_norm": 0.6886880993843079,
"learning_rate": 3.937458321889592e-06,
"loss": 0.6179,
"step": 833
},
{
"epoch": 1.8356566397652236,
"grad_norm": 0.7266597151756287,
"learning_rate": 3.924931885571811e-06,
"loss": 0.6399,
"step": 834
},
{
"epoch": 1.8378576669112252,
"grad_norm": 0.70559161901474,
"learning_rate": 3.912412519968685e-06,
"loss": 0.6271,
"step": 835
},
{
"epoch": 1.8400586940572268,
"grad_norm": 0.7089952826499939,
"learning_rate": 3.899900307419982e-06,
"loss": 0.6339,
"step": 836
},
{
"epoch": 1.8422597212032281,
"grad_norm": 0.7014769911766052,
"learning_rate": 3.887395330218429e-06,
"loss": 0.6498,
"step": 837
},
{
"epoch": 1.8444607483492297,
"grad_norm": 0.7272794246673584,
"learning_rate": 3.874897670609164e-06,
"loss": 0.6039,
"step": 838
},
{
"epoch": 1.8466617754952313,
"grad_norm": 0.7345197200775146,
"learning_rate": 3.862407410789198e-06,
"loss": 0.6131,
"step": 839
},
{
"epoch": 1.8488628026412326,
"grad_norm": 0.7372382283210754,
"learning_rate": 3.849924632906872e-06,
"loss": 0.6472,
"step": 840
},
{
"epoch": 1.851063829787234,
"grad_norm": 0.7239442467689514,
"learning_rate": 3.83744941906132e-06,
"loss": 0.6529,
"step": 841
},
{
"epoch": 1.8532648569332355,
"grad_norm": 0.75417160987854,
"learning_rate": 3.824981851301924e-06,
"loss": 0.6253,
"step": 842
},
{
"epoch": 1.855465884079237,
"grad_norm": 0.711287796497345,
"learning_rate": 3.8125220116277855e-06,
"loss": 0.63,
"step": 843
},
{
"epoch": 1.8576669112252384,
"grad_norm": 0.7281585931777954,
"learning_rate": 3.8000699819871704e-06,
"loss": 0.6511,
"step": 844
},
{
"epoch": 1.8598679383712398,
"grad_norm": 0.7852776050567627,
"learning_rate": 3.787625844276982e-06,
"loss": 0.6195,
"step": 845
},
{
"epoch": 1.8620689655172413,
"grad_norm": 0.7256373763084412,
"learning_rate": 3.775189680342217e-06,
"loss": 0.6319,
"step": 846
},
{
"epoch": 1.864269992663243,
"grad_norm": 0.7157067060470581,
"learning_rate": 3.7627615719754294e-06,
"loss": 0.6471,
"step": 847
},
{
"epoch": 1.8664710198092442,
"grad_norm": 0.7621313333511353,
"learning_rate": 3.7503416009161915e-06,
"loss": 0.6509,
"step": 848
},
{
"epoch": 1.8686720469552458,
"grad_norm": 0.7717511653900146,
"learning_rate": 3.737929848850555e-06,
"loss": 0.643,
"step": 849
},
{
"epoch": 1.8708730741012474,
"grad_norm": 0.7611857056617737,
"learning_rate": 3.72552639741052e-06,
"loss": 0.6714,
"step": 850
},
{
"epoch": 1.8730741012472487,
"grad_norm": 0.7195001840591431,
"learning_rate": 3.7131313281734895e-06,
"loss": 0.6499,
"step": 851
},
{
"epoch": 1.87527512839325,
"grad_norm": 0.7105995416641235,
"learning_rate": 3.7007447226617367e-06,
"loss": 0.6528,
"step": 852
},
{
"epoch": 1.8774761555392516,
"grad_norm": 0.7779428362846375,
"learning_rate": 3.6883666623418702e-06,
"loss": 0.6238,
"step": 853
},
{
"epoch": 1.8796771826852532,
"grad_norm": 0.7399506568908691,
"learning_rate": 3.6759972286242977e-06,
"loss": 0.6197,
"step": 854
},
{
"epoch": 1.8818782098312545,
"grad_norm": 0.7091827392578125,
"learning_rate": 3.663636502862689e-06,
"loss": 0.6438,
"step": 855
},
{
"epoch": 1.884079236977256,
"grad_norm": 0.702155590057373,
"learning_rate": 3.6512845663534412e-06,
"loss": 0.637,
"step": 856
},
{
"epoch": 1.8862802641232577,
"grad_norm": 0.7721629738807678,
"learning_rate": 3.638941500335145e-06,
"loss": 0.6385,
"step": 857
},
{
"epoch": 1.888481291269259,
"grad_norm": 0.7414896488189697,
"learning_rate": 3.62660738598805e-06,
"loss": 0.6647,
"step": 858
},
{
"epoch": 1.8906823184152604,
"grad_norm": 0.7226418852806091,
"learning_rate": 3.6142823044335306e-06,
"loss": 0.6265,
"step": 859
},
{
"epoch": 1.892883345561262,
"grad_norm": 0.6877660751342773,
"learning_rate": 3.6019663367335507e-06,
"loss": 0.6459,
"step": 860
},
{
"epoch": 1.8950843727072635,
"grad_norm": 0.7526683807373047,
"learning_rate": 3.5896595638901373e-06,
"loss": 0.6405,
"step": 861
},
{
"epoch": 1.8972853998532648,
"grad_norm": 0.7327764630317688,
"learning_rate": 3.5773620668448384e-06,
"loss": 0.6366,
"step": 862
},
{
"epoch": 1.8994864269992662,
"grad_norm": 0.708452045917511,
"learning_rate": 3.5650739264781976e-06,
"loss": 0.6562,
"step": 863
},
{
"epoch": 1.9016874541452677,
"grad_norm": 0.6902774572372437,
"learning_rate": 3.552795223609219e-06,
"loss": 0.6582,
"step": 864
},
{
"epoch": 1.9038884812912693,
"grad_norm": 0.7468820810317993,
"learning_rate": 3.540526038994834e-06,
"loss": 0.6464,
"step": 865
},
{
"epoch": 1.9060895084372707,
"grad_norm": 0.7193624377250671,
"learning_rate": 3.5282664533293763e-06,
"loss": 0.648,
"step": 866
},
{
"epoch": 1.9082905355832722,
"grad_norm": 0.7115476727485657,
"learning_rate": 3.516016547244047e-06,
"loss": 0.6369,
"step": 867
},
{
"epoch": 1.9104915627292738,
"grad_norm": 0.6839916706085205,
"learning_rate": 3.5037764013063825e-06,
"loss": 0.6286,
"step": 868
},
{
"epoch": 1.9126925898752751,
"grad_norm": 0.7385376691818237,
"learning_rate": 3.4915460960197277e-06,
"loss": 0.6191,
"step": 869
},
{
"epoch": 1.9148936170212765,
"grad_norm": 0.7022069692611694,
"learning_rate": 3.479325711822704e-06,
"loss": 0.6483,
"step": 870
},
{
"epoch": 1.917094644167278,
"grad_norm": 0.7348030805587769,
"learning_rate": 3.4671153290886863e-06,
"loss": 0.6471,
"step": 871
},
{
"epoch": 1.9192956713132796,
"grad_norm": 0.7171069979667664,
"learning_rate": 3.4549150281252635e-06,
"loss": 0.6373,
"step": 872
},
{
"epoch": 1.921496698459281,
"grad_norm": 0.7076753377914429,
"learning_rate": 3.442724889173724e-06,
"loss": 0.6781,
"step": 873
},
{
"epoch": 1.9236977256052825,
"grad_norm": 0.6863194704055786,
"learning_rate": 3.4305449924085165e-06,
"loss": 0.6409,
"step": 874
},
{
"epoch": 1.925898752751284,
"grad_norm": 0.6648731231689453,
"learning_rate": 3.4183754179367268e-06,
"loss": 0.6295,
"step": 875
},
{
"epoch": 1.9280997798972854,
"grad_norm": 0.7062124609947205,
"learning_rate": 3.406216245797551e-06,
"loss": 0.6175,
"step": 876
},
{
"epoch": 1.9303008070432868,
"grad_norm": 0.7173147201538086,
"learning_rate": 3.3940675559617724e-06,
"loss": 0.6279,
"step": 877
},
{
"epoch": 1.9325018341892883,
"grad_norm": 0.702974796295166,
"learning_rate": 3.3819294283312286e-06,
"loss": 0.6492,
"step": 878
},
{
"epoch": 1.93470286133529,
"grad_norm": 0.7027614116668701,
"learning_rate": 3.3698019427382912e-06,
"loss": 0.6292,
"step": 879
},
{
"epoch": 1.9369038884812912,
"grad_norm": 0.7111174464225769,
"learning_rate": 3.357685178945339e-06,
"loss": 0.6195,
"step": 880
},
{
"epoch": 1.9391049156272926,
"grad_norm": 0.7053228616714478,
"learning_rate": 3.3455792166442323e-06,
"loss": 0.6283,
"step": 881
},
{
"epoch": 1.9413059427732942,
"grad_norm": 0.7096617221832275,
"learning_rate": 3.3334841354557923e-06,
"loss": 0.6467,
"step": 882
},
{
"epoch": 1.9435069699192957,
"grad_norm": 0.6906343698501587,
"learning_rate": 3.3214000149292734e-06,
"loss": 0.6043,
"step": 883
},
{
"epoch": 1.945707997065297,
"grad_norm": 0.7340171337127686,
"learning_rate": 3.3093269345418443e-06,
"loss": 0.6254,
"step": 884
},
{
"epoch": 1.9479090242112986,
"grad_norm": 0.7353804707527161,
"learning_rate": 3.29726497369806e-06,
"loss": 0.6662,
"step": 885
},
{
"epoch": 1.9501100513573002,
"grad_norm": 0.7345585823059082,
"learning_rate": 3.2852142117293435e-06,
"loss": 0.6165,
"step": 886
},
{
"epoch": 1.9523110785033015,
"grad_norm": 0.6950768232345581,
"learning_rate": 3.273174727893463e-06,
"loss": 0.6527,
"step": 887
},
{
"epoch": 1.9545121056493029,
"grad_norm": 0.7124642729759216,
"learning_rate": 3.261146601374009e-06,
"loss": 0.6735,
"step": 888
},
{
"epoch": 1.9567131327953045,
"grad_norm": 0.7431399822235107,
"learning_rate": 3.2491299112798793e-06,
"loss": 0.6474,
"step": 889
},
{
"epoch": 1.958914159941306,
"grad_norm": 0.7045615911483765,
"learning_rate": 3.237124736644749e-06,
"loss": 0.6246,
"step": 890
},
{
"epoch": 1.9611151870873074,
"grad_norm": 0.6732543110847473,
"learning_rate": 3.22513115642656e-06,
"loss": 0.6484,
"step": 891
},
{
"epoch": 1.963316214233309,
"grad_norm": 0.7434794902801514,
"learning_rate": 3.213149249506997e-06,
"loss": 0.6206,
"step": 892
},
{
"epoch": 1.9655172413793105,
"grad_norm": 0.6707674264907837,
"learning_rate": 3.2011790946909673e-06,
"loss": 0.6374,
"step": 893
},
{
"epoch": 1.9677182685253118,
"grad_norm": 0.7027508020401001,
"learning_rate": 3.189220770706086e-06,
"loss": 0.638,
"step": 894
},
{
"epoch": 1.9699192956713132,
"grad_norm": 0.7122715711593628,
"learning_rate": 3.177274356202157e-06,
"loss": 0.6434,
"step": 895
},
{
"epoch": 1.9721203228173148,
"grad_norm": 0.7096172571182251,
"learning_rate": 3.1653399297506583e-06,
"loss": 0.6464,
"step": 896
},
{
"epoch": 1.9743213499633163,
"grad_norm": 0.7474642992019653,
"learning_rate": 3.1534175698442194e-06,
"loss": 0.6509,
"step": 897
},
{
"epoch": 1.9765223771093177,
"grad_norm": 0.7248972654342651,
"learning_rate": 3.141507354896107e-06,
"loss": 0.6508,
"step": 898
},
{
"epoch": 1.978723404255319,
"grad_norm": 0.6969814896583557,
"learning_rate": 3.129609363239714e-06,
"loss": 0.644,
"step": 899
},
{
"epoch": 1.9809244314013206,
"grad_norm": 0.7344887256622314,
"learning_rate": 3.11772367312804e-06,
"loss": 0.642,
"step": 900
},
{
"epoch": 1.9831254585473221,
"grad_norm": 0.7122581005096436,
"learning_rate": 3.105850362733176e-06,
"loss": 0.6666,
"step": 901
},
{
"epoch": 1.9853264856933235,
"grad_norm": 0.7175470590591431,
"learning_rate": 3.093989510145792e-06,
"loss": 0.6386,
"step": 902
},
{
"epoch": 1.987527512839325,
"grad_norm": 0.7330093383789062,
"learning_rate": 3.082141193374625e-06,
"loss": 0.6392,
"step": 903
},
{
"epoch": 1.9897285399853266,
"grad_norm": 0.7079413533210754,
"learning_rate": 3.0703054903459607e-06,
"loss": 0.6345,
"step": 904
},
{
"epoch": 1.991929567131328,
"grad_norm": 0.6710329055786133,
"learning_rate": 3.0584824789031266e-06,
"loss": 0.6282,
"step": 905
},
{
"epoch": 1.9941305942773293,
"grad_norm": 0.720341145992279,
"learning_rate": 3.046672236805976e-06,
"loss": 0.6491,
"step": 906
},
{
"epoch": 1.9963316214233309,
"grad_norm": 0.7132683396339417,
"learning_rate": 3.0348748417303826e-06,
"loss": 0.6397,
"step": 907
},
{
"epoch": 1.9985326485693324,
"grad_norm": 0.7394329309463501,
"learning_rate": 3.0230903712677207e-06,
"loss": 0.64,
"step": 908
},
{
"epoch": 2.000733675715334,
"grad_norm": 0.7213972806930542,
"learning_rate": 3.0113189029243616e-06,
"loss": 0.5812,
"step": 909
},
{
"epoch": 2.002934702861335,
"grad_norm": 0.8564555644989014,
"learning_rate": 2.9995605141211615e-06,
"loss": 0.5398,
"step": 910
},
{
"epoch": 2.005135730007337,
"grad_norm": 0.8516649603843689,
"learning_rate": 2.987815282192951e-06,
"loss": 0.5288,
"step": 911
},
{
"epoch": 2.0073367571533383,
"grad_norm": 0.7479093074798584,
"learning_rate": 2.976083284388031e-06,
"loss": 0.5227,
"step": 912
},
{
"epoch": 2.0095377842993396,
"grad_norm": 0.6992958188056946,
"learning_rate": 2.964364597867659e-06,
"loss": 0.5353,
"step": 913
},
{
"epoch": 2.011738811445341,
"grad_norm": 0.6885507702827454,
"learning_rate": 2.9526592997055488e-06,
"loss": 0.519,
"step": 914
},
{
"epoch": 2.0139398385913427,
"grad_norm": 0.8126917481422424,
"learning_rate": 2.940967466887351e-06,
"loss": 0.5516,
"step": 915
},
{
"epoch": 2.016140865737344,
"grad_norm": 0.8459115624427795,
"learning_rate": 2.929289176310161e-06,
"loss": 0.5151,
"step": 916
},
{
"epoch": 2.0183418928833454,
"grad_norm": 0.819183349609375,
"learning_rate": 2.9176245047820064e-06,
"loss": 0.5396,
"step": 917
},
{
"epoch": 2.020542920029347,
"grad_norm": 0.7847302556037903,
"learning_rate": 2.9059735290213387e-06,
"loss": 0.5108,
"step": 918
},
{
"epoch": 2.0227439471753486,
"grad_norm": 0.7248134016990662,
"learning_rate": 2.8943363256565394e-06,
"loss": 0.5186,
"step": 919
},
{
"epoch": 2.02494497432135,
"grad_norm": 0.7036641836166382,
"learning_rate": 2.882712971225401e-06,
"loss": 0.5478,
"step": 920
},
{
"epoch": 2.0271460014673512,
"grad_norm": 0.7601596117019653,
"learning_rate": 2.871103542174637e-06,
"loss": 0.5447,
"step": 921
},
{
"epoch": 2.029347028613353,
"grad_norm": 0.7116886973381042,
"learning_rate": 2.859508114859374e-06,
"loss": 0.5497,
"step": 922
},
{
"epoch": 2.0315480557593544,
"grad_norm": 0.6906183958053589,
"learning_rate": 2.8479267655426435e-06,
"loss": 0.549,
"step": 923
},
{
"epoch": 2.0337490829053557,
"grad_norm": 0.7110432982444763,
"learning_rate": 2.8363595703948933e-06,
"loss": 0.5523,
"step": 924
},
{
"epoch": 2.0359501100513575,
"grad_norm": 0.7031733989715576,
"learning_rate": 2.824806605493477e-06,
"loss": 0.5433,
"step": 925
},
{
"epoch": 2.038151137197359,
"grad_norm": 0.7145381569862366,
"learning_rate": 2.8132679468221537e-06,
"loss": 0.5218,
"step": 926
},
{
"epoch": 2.04035216434336,
"grad_norm": 0.6916840672492981,
"learning_rate": 2.80174367027059e-06,
"loss": 0.5196,
"step": 927
},
{
"epoch": 2.0425531914893615,
"grad_norm": 0.6630658507347107,
"learning_rate": 2.790233851633868e-06,
"loss": 0.5321,
"step": 928
},
{
"epoch": 2.0447542186353633,
"grad_norm": 0.6972963213920593,
"learning_rate": 2.7787385666119704e-06,
"loss": 0.5294,
"step": 929
},
{
"epoch": 2.0469552457813647,
"grad_norm": 0.6733927130699158,
"learning_rate": 2.7672578908093024e-06,
"loss": 0.5149,
"step": 930
},
{
"epoch": 2.049156272927366,
"grad_norm": 0.6992024779319763,
"learning_rate": 2.755791899734176e-06,
"loss": 0.5208,
"step": 931
},
{
"epoch": 2.0513573000733674,
"grad_norm": 0.7477166652679443,
"learning_rate": 2.7443406687983267e-06,
"loss": 0.532,
"step": 932
},
{
"epoch": 2.053558327219369,
"grad_norm": 0.7102105021476746,
"learning_rate": 2.7329042733164145e-06,
"loss": 0.5183,
"step": 933
},
{
"epoch": 2.0557593543653705,
"grad_norm": 0.7366941571235657,
"learning_rate": 2.7214827885055194e-06,
"loss": 0.5226,
"step": 934
},
{
"epoch": 2.057960381511372,
"grad_norm": 0.7238156199455261,
"learning_rate": 2.7100762894846633e-06,
"loss": 0.5723,
"step": 935
},
{
"epoch": 2.0601614086573736,
"grad_norm": 0.7042707800865173,
"learning_rate": 2.698684851274297e-06,
"loss": 0.5195,
"step": 936
},
{
"epoch": 2.062362435803375,
"grad_norm": 0.7326483726501465,
"learning_rate": 2.687308548795825e-06,
"loss": 0.4963,
"step": 937
},
{
"epoch": 2.0645634629493763,
"grad_norm": 0.6792129874229431,
"learning_rate": 2.675947456871096e-06,
"loss": 0.5424,
"step": 938
},
{
"epoch": 2.0667644900953777,
"grad_norm": 0.696242094039917,
"learning_rate": 2.6646016502219304e-06,
"loss": 0.5398,
"step": 939
},
{
"epoch": 2.0689655172413794,
"grad_norm": 0.704230546951294,
"learning_rate": 2.6532712034696034e-06,
"loss": 0.5324,
"step": 940
},
{
"epoch": 2.071166544387381,
"grad_norm": 0.7116917967796326,
"learning_rate": 2.6419561911343812e-06,
"loss": 0.5436,
"step": 941
},
{
"epoch": 2.073367571533382,
"grad_norm": 0.7269853353500366,
"learning_rate": 2.6306566876350072e-06,
"loss": 0.5447,
"step": 942
},
{
"epoch": 2.075568598679384,
"grad_norm": 0.6866284012794495,
"learning_rate": 2.6193727672882308e-06,
"loss": 0.5406,
"step": 943
},
{
"epoch": 2.0777696258253853,
"grad_norm": 0.6931535005569458,
"learning_rate": 2.608104504308311e-06,
"loss": 0.5269,
"step": 944
},
{
"epoch": 2.0799706529713866,
"grad_norm": 0.6900680661201477,
"learning_rate": 2.596851972806522e-06,
"loss": 0.4984,
"step": 945
},
{
"epoch": 2.082171680117388,
"grad_norm": 0.6793876886367798,
"learning_rate": 2.5856152467906793e-06,
"loss": 0.5143,
"step": 946
},
{
"epoch": 2.0843727072633897,
"grad_norm": 0.7096752524375916,
"learning_rate": 2.5743944001646394e-06,
"loss": 0.5009,
"step": 947
},
{
"epoch": 2.086573734409391,
"grad_norm": 0.7271909713745117,
"learning_rate": 2.563189506727828e-06,
"loss": 0.5588,
"step": 948
},
{
"epoch": 2.0887747615553924,
"grad_norm": 0.7151763439178467,
"learning_rate": 2.55200064017474e-06,
"loss": 0.5284,
"step": 949
},
{
"epoch": 2.090975788701394,
"grad_norm": 0.6767574548721313,
"learning_rate": 2.540827874094462e-06,
"loss": 0.5565,
"step": 950
},
{
"epoch": 2.0931768158473956,
"grad_norm": 0.6770440936088562,
"learning_rate": 2.529671281970192e-06,
"loss": 0.5322,
"step": 951
},
{
"epoch": 2.095377842993397,
"grad_norm": 0.688513457775116,
"learning_rate": 2.5185309371787515e-06,
"loss": 0.5656,
"step": 952
},
{
"epoch": 2.0975788701393983,
"grad_norm": 0.7212648391723633,
"learning_rate": 2.507406912990098e-06,
"loss": 0.517,
"step": 953
},
{
"epoch": 2.0997798972854,
"grad_norm": 0.673067569732666,
"learning_rate": 2.4962992825668546e-06,
"loss": 0.5406,
"step": 954
},
{
"epoch": 2.1019809244314014,
"grad_norm": 0.6931125521659851,
"learning_rate": 2.4852081189638227e-06,
"loss": 0.5273,
"step": 955
},
{
"epoch": 2.1041819515774027,
"grad_norm": 0.7032851576805115,
"learning_rate": 2.4741334951274948e-06,
"loss": 0.5231,
"step": 956
},
{
"epoch": 2.106382978723404,
"grad_norm": 0.6988456845283508,
"learning_rate": 2.46307548389559e-06,
"loss": 0.5276,
"step": 957
},
{
"epoch": 2.108584005869406,
"grad_norm": 0.7143514752388,
"learning_rate": 2.452034157996559e-06,
"loss": 0.5512,
"step": 958
},
{
"epoch": 2.110785033015407,
"grad_norm": 0.6739227175712585,
"learning_rate": 2.441009590049118e-06,
"loss": 0.5253,
"step": 959
},
{
"epoch": 2.1129860601614086,
"grad_norm": 0.6597806215286255,
"learning_rate": 2.430001852561769e-06,
"loss": 0.5672,
"step": 960
},
{
"epoch": 2.11518708730741,
"grad_norm": 0.6939047574996948,
"learning_rate": 2.419011017932309e-06,
"loss": 0.5452,
"step": 961
},
{
"epoch": 2.1173881144534117,
"grad_norm": 0.6978014707565308,
"learning_rate": 2.408037158447375e-06,
"loss": 0.5428,
"step": 962
},
{
"epoch": 2.119589141599413,
"grad_norm": 0.6962845921516418,
"learning_rate": 2.3970803462819586e-06,
"loss": 0.5319,
"step": 963
},
{
"epoch": 2.1217901687454144,
"grad_norm": 0.738998293876648,
"learning_rate": 2.386140653498924e-06,
"loss": 0.5579,
"step": 964
},
{
"epoch": 2.123991195891416,
"grad_norm": 0.6681599617004395,
"learning_rate": 2.3752181520485497e-06,
"loss": 0.5237,
"step": 965
},
{
"epoch": 2.1261922230374175,
"grad_norm": 0.6826555132865906,
"learning_rate": 2.36431291376804e-06,
"loss": 0.5391,
"step": 966
},
{
"epoch": 2.128393250183419,
"grad_norm": 0.6950371861457825,
"learning_rate": 2.353425010381063e-06,
"loss": 0.5547,
"step": 967
},
{
"epoch": 2.13059427732942,
"grad_norm": 0.7582732439041138,
"learning_rate": 2.342554513497278e-06,
"loss": 0.5607,
"step": 968
},
{
"epoch": 2.132795304475422,
"grad_norm": 0.7009463906288147,
"learning_rate": 2.331701494611855e-06,
"loss": 0.5193,
"step": 969
},
{
"epoch": 2.1349963316214233,
"grad_norm": 0.7017679810523987,
"learning_rate": 2.320866025105016e-06,
"loss": 0.5305,
"step": 970
},
{
"epoch": 2.1371973587674247,
"grad_norm": 0.6899322867393494,
"learning_rate": 2.3100481762415642e-06,
"loss": 0.5142,
"step": 971
},
{
"epoch": 2.1393983859134265,
"grad_norm": 0.73470538854599,
"learning_rate": 2.2992480191704003e-06,
"loss": 0.5492,
"step": 972
},
{
"epoch": 2.141599413059428,
"grad_norm": 0.6968899965286255,
"learning_rate": 2.288465624924078e-06,
"loss": 0.534,
"step": 973
},
{
"epoch": 2.143800440205429,
"grad_norm": 0.6920455694198608,
"learning_rate": 2.277701064418321e-06,
"loss": 0.5077,
"step": 974
},
{
"epoch": 2.1460014673514305,
"grad_norm": 0.6632540225982666,
"learning_rate": 2.2669544084515578e-06,
"loss": 0.5098,
"step": 975
},
{
"epoch": 2.1482024944974323,
"grad_norm": 0.6753472089767456,
"learning_rate": 2.2562257277044645e-06,
"loss": 0.5258,
"step": 976
},
{
"epoch": 2.1504035216434336,
"grad_norm": 0.6799765825271606,
"learning_rate": 2.245515092739488e-06,
"loss": 0.5234,
"step": 977
},
{
"epoch": 2.152604548789435,
"grad_norm": 0.6979172229766846,
"learning_rate": 2.2348225740003927e-06,
"loss": 0.51,
"step": 978
},
{
"epoch": 2.1548055759354368,
"grad_norm": 0.7042593955993652,
"learning_rate": 2.224148241811794e-06,
"loss": 0.5478,
"step": 979
},
{
"epoch": 2.157006603081438,
"grad_norm": 0.6685957908630371,
"learning_rate": 2.2134921663786875e-06,
"loss": 0.5323,
"step": 980
},
{
"epoch": 2.1592076302274394,
"grad_norm": 0.6937947869300842,
"learning_rate": 2.2028544177860028e-06,
"loss": 0.5411,
"step": 981
},
{
"epoch": 2.161408657373441,
"grad_norm": 0.6986700892448425,
"learning_rate": 2.1922350659981262e-06,
"loss": 0.5571,
"step": 982
},
{
"epoch": 2.1636096845194426,
"grad_norm": 0.6792914271354675,
"learning_rate": 2.1816341808584564e-06,
"loss": 0.5391,
"step": 983
},
{
"epoch": 2.165810711665444,
"grad_norm": 0.6729667782783508,
"learning_rate": 2.171051832088928e-06,
"loss": 0.5389,
"step": 984
},
{
"epoch": 2.1680117388114453,
"grad_norm": 0.6751646995544434,
"learning_rate": 2.1604880892895707e-06,
"loss": 0.5467,
"step": 985
},
{
"epoch": 2.1702127659574466,
"grad_norm": 0.6553260087966919,
"learning_rate": 2.1499430219380357e-06,
"loss": 0.5156,
"step": 986
},
{
"epoch": 2.1724137931034484,
"grad_norm": 0.6624425053596497,
"learning_rate": 2.139416699389153e-06,
"loss": 0.4931,
"step": 987
},
{
"epoch": 2.1746148202494497,
"grad_norm": 0.6936183571815491,
"learning_rate": 2.128909190874461e-06,
"loss": 0.5617,
"step": 988
},
{
"epoch": 2.176815847395451,
"grad_norm": 0.6945171356201172,
"learning_rate": 2.118420565501762e-06,
"loss": 0.5242,
"step": 989
},
{
"epoch": 2.179016874541453,
"grad_norm": 0.7096056938171387,
"learning_rate": 2.107950892254668e-06,
"loss": 0.5296,
"step": 990
},
{
"epoch": 2.181217901687454,
"grad_norm": 0.6809952855110168,
"learning_rate": 2.097500239992132e-06,
"loss": 0.5345,
"step": 991
},
{
"epoch": 2.1834189288334556,
"grad_norm": 0.6936696767807007,
"learning_rate": 2.08706867744802e-06,
"loss": 0.5272,
"step": 992
},
{
"epoch": 2.185619955979457,
"grad_norm": 0.6907182931900024,
"learning_rate": 2.0766562732306323e-06,
"loss": 0.527,
"step": 993
},
{
"epoch": 2.1878209831254587,
"grad_norm": 0.6831082701683044,
"learning_rate": 2.0662630958222747e-06,
"loss": 0.5295,
"step": 994
},
{
"epoch": 2.19002201027146,
"grad_norm": 0.6951304078102112,
"learning_rate": 2.0558892135787927e-06,
"loss": 0.5025,
"step": 995
},
{
"epoch": 2.1922230374174614,
"grad_norm": 0.6927926540374756,
"learning_rate": 2.0455346947291277e-06,
"loss": 0.5337,
"step": 996
},
{
"epoch": 2.1944240645634627,
"grad_norm": 0.669577419757843,
"learning_rate": 2.0351996073748713e-06,
"loss": 0.5297,
"step": 997
},
{
"epoch": 2.1966250917094645,
"grad_norm": 0.7174883484840393,
"learning_rate": 2.0248840194898155e-06,
"loss": 0.5198,
"step": 998
},
{
"epoch": 2.198826118855466,
"grad_norm": 0.7197242379188538,
"learning_rate": 2.014587998919498e-06,
"loss": 0.5381,
"step": 999
},
{
"epoch": 2.201027146001467,
"grad_norm": 0.7077149748802185,
"learning_rate": 2.0043116133807673e-06,
"loss": 0.5518,
"step": 1000
},
{
"epoch": 2.203228173147469,
"grad_norm": 0.6633769273757935,
"learning_rate": 1.9940549304613334e-06,
"loss": 0.5247,
"step": 1001
},
{
"epoch": 2.2054292002934703,
"grad_norm": 0.6791988015174866,
"learning_rate": 1.983818017619318e-06,
"loss": 0.5595,
"step": 1002
},
{
"epoch": 2.2076302274394717,
"grad_norm": 0.7209838628768921,
"learning_rate": 1.9736009421828196e-06,
"loss": 0.5348,
"step": 1003
},
{
"epoch": 2.209831254585473,
"grad_norm": 0.7330271601676941,
"learning_rate": 1.963403771349461e-06,
"loss": 0.5488,
"step": 1004
},
{
"epoch": 2.212032281731475,
"grad_norm": 0.7162162661552429,
"learning_rate": 1.95322657218596e-06,
"loss": 0.5301,
"step": 1005
},
{
"epoch": 2.214233308877476,
"grad_norm": 0.6918371915817261,
"learning_rate": 1.9430694116276745e-06,
"loss": 0.5222,
"step": 1006
},
{
"epoch": 2.2164343360234775,
"grad_norm": 0.6973346471786499,
"learning_rate": 1.932932356478168e-06,
"loss": 0.5079,
"step": 1007
},
{
"epoch": 2.2186353631694793,
"grad_norm": 0.6862022876739502,
"learning_rate": 1.9228154734087766e-06,
"loss": 0.5178,
"step": 1008
},
{
"epoch": 2.2208363903154806,
"grad_norm": 0.7392538785934448,
"learning_rate": 1.912718828958163e-06,
"loss": 0.5343,
"step": 1009
},
{
"epoch": 2.223037417461482,
"grad_norm": 0.7012175917625427,
"learning_rate": 1.9026424895318762e-06,
"loss": 0.5226,
"step": 1010
},
{
"epoch": 2.2252384446074833,
"grad_norm": 0.6872196793556213,
"learning_rate": 1.892586521401924e-06,
"loss": 0.54,
"step": 1011
},
{
"epoch": 2.227439471753485,
"grad_norm": 0.7196520566940308,
"learning_rate": 1.8825509907063328e-06,
"loss": 0.5434,
"step": 1012
},
{
"epoch": 2.2296404988994865,
"grad_norm": 0.7016687393188477,
"learning_rate": 1.8725359634487068e-06,
"loss": 0.5401,
"step": 1013
},
{
"epoch": 2.231841526045488,
"grad_norm": 0.7230379581451416,
"learning_rate": 1.8625415054978058e-06,
"loss": 0.5374,
"step": 1014
},
{
"epoch": 2.2340425531914896,
"grad_norm": 0.6944716572761536,
"learning_rate": 1.8525676825870986e-06,
"loss": 0.5171,
"step": 1015
},
{
"epoch": 2.236243580337491,
"grad_norm": 0.6953030228614807,
"learning_rate": 1.8426145603143441e-06,
"loss": 0.5287,
"step": 1016
},
{
"epoch": 2.2384446074834923,
"grad_norm": 0.6746073961257935,
"learning_rate": 1.8326822041411524e-06,
"loss": 0.5366,
"step": 1017
},
{
"epoch": 2.2406456346294936,
"grad_norm": 0.6684333086013794,
"learning_rate": 1.8227706793925464e-06,
"loss": 0.5589,
"step": 1018
},
{
"epoch": 2.2428466617754954,
"grad_norm": 0.7026172876358032,
"learning_rate": 1.8128800512565514e-06,
"loss": 0.5185,
"step": 1019
},
{
"epoch": 2.2450476889214968,
"grad_norm": 0.7008723020553589,
"learning_rate": 1.8030103847837538e-06,
"loss": 0.5238,
"step": 1020
},
{
"epoch": 2.247248716067498,
"grad_norm": 0.664910078048706,
"learning_rate": 1.79316174488687e-06,
"loss": 0.5205,
"step": 1021
},
{
"epoch": 2.2494497432134994,
"grad_norm": 0.6518818140029907,
"learning_rate": 1.7833341963403312e-06,
"loss": 0.5464,
"step": 1022
},
{
"epoch": 2.2516507703595012,
"grad_norm": 0.6947062015533447,
"learning_rate": 1.7735278037798442e-06,
"loss": 0.5205,
"step": 1023
},
{
"epoch": 2.2538517975055026,
"grad_norm": 0.6576992273330688,
"learning_rate": 1.7637426317019801e-06,
"loss": 0.5299,
"step": 1024
},
{
"epoch": 2.256052824651504,
"grad_norm": 0.682668924331665,
"learning_rate": 1.7539787444637402e-06,
"loss": 0.5304,
"step": 1025
},
{
"epoch": 2.2582538517975053,
"grad_norm": 0.7044371962547302,
"learning_rate": 1.7442362062821323e-06,
"loss": 0.5437,
"step": 1026
},
{
"epoch": 2.260454878943507,
"grad_norm": 0.69700688123703,
"learning_rate": 1.7345150812337564e-06,
"loss": 0.5215,
"step": 1027
},
{
"epoch": 2.2626559060895084,
"grad_norm": 0.7171713709831238,
"learning_rate": 1.7248154332543788e-06,
"loss": 0.5397,
"step": 1028
},
{
"epoch": 2.2648569332355097,
"grad_norm": 0.6871711015701294,
"learning_rate": 1.715137326138504e-06,
"loss": 0.5133,
"step": 1029
},
{
"epoch": 2.2670579603815115,
"grad_norm": 0.6524863243103027,
"learning_rate": 1.7054808235389696e-06,
"loss": 0.5535,
"step": 1030
},
{
"epoch": 2.269258987527513,
"grad_norm": 0.7066587209701538,
"learning_rate": 1.6958459889665202e-06,
"loss": 0.5514,
"step": 1031
},
{
"epoch": 2.271460014673514,
"grad_norm": 0.7277991771697998,
"learning_rate": 1.6862328857893856e-06,
"loss": 0.523,
"step": 1032
},
{
"epoch": 2.2736610418195156,
"grad_norm": 0.6966294646263123,
"learning_rate": 1.6766415772328732e-06,
"loss": 0.5169,
"step": 1033
},
{
"epoch": 2.2758620689655173,
"grad_norm": 0.690147876739502,
"learning_rate": 1.667072126378942e-06,
"loss": 0.5451,
"step": 1034
},
{
"epoch": 2.2780630961115187,
"grad_norm": 0.6668645143508911,
"learning_rate": 1.6575245961657977e-06,
"loss": 0.5463,
"step": 1035
},
{
"epoch": 2.28026412325752,
"grad_norm": 0.6423293948173523,
"learning_rate": 1.6479990493874741e-06,
"loss": 0.4938,
"step": 1036
},
{
"epoch": 2.282465150403522,
"grad_norm": 0.6925761699676514,
"learning_rate": 1.6384955486934157e-06,
"loss": 0.5189,
"step": 1037
},
{
"epoch": 2.284666177549523,
"grad_norm": 0.6975389719009399,
"learning_rate": 1.6290141565880758e-06,
"loss": 0.5076,
"step": 1038
},
{
"epoch": 2.2868672046955245,
"grad_norm": 0.7112499475479126,
"learning_rate": 1.6195549354304952e-06,
"loss": 0.511,
"step": 1039
},
{
"epoch": 2.289068231841526,
"grad_norm": 0.6966331005096436,
"learning_rate": 1.610117947433897e-06,
"loss": 0.5156,
"step": 1040
},
{
"epoch": 2.2912692589875276,
"grad_norm": 0.6787795424461365,
"learning_rate": 1.6007032546652784e-06,
"loss": 0.5179,
"step": 1041
},
{
"epoch": 2.293470286133529,
"grad_norm": 0.6976974606513977,
"learning_rate": 1.5913109190450033e-06,
"loss": 0.5317,
"step": 1042
},
{
"epoch": 2.2956713132795303,
"grad_norm": 0.6754323840141296,
"learning_rate": 1.581941002346387e-06,
"loss": 0.5229,
"step": 1043
},
{
"epoch": 2.297872340425532,
"grad_norm": 0.6827859282493591,
"learning_rate": 1.5725935661953024e-06,
"loss": 0.5319,
"step": 1044
},
{
"epoch": 2.3000733675715335,
"grad_norm": 0.7029504179954529,
"learning_rate": 1.5632686720697604e-06,
"loss": 0.5633,
"step": 1045
},
{
"epoch": 2.302274394717535,
"grad_norm": 0.7189164161682129,
"learning_rate": 1.5539663812995204e-06,
"loss": 0.5384,
"step": 1046
},
{
"epoch": 2.304475421863536,
"grad_norm": 0.6952174305915833,
"learning_rate": 1.544686755065677e-06,
"loss": 0.5171,
"step": 1047
},
{
"epoch": 2.306676449009538,
"grad_norm": 0.6815386414527893,
"learning_rate": 1.5354298544002576e-06,
"loss": 0.5384,
"step": 1048
},
{
"epoch": 2.3088774761555393,
"grad_norm": 0.701146125793457,
"learning_rate": 1.526195740185829e-06,
"loss": 0.5398,
"step": 1049
},
{
"epoch": 2.3110785033015406,
"grad_norm": 0.683167040348053,
"learning_rate": 1.516984473155086e-06,
"loss": 0.5673,
"step": 1050
},
{
"epoch": 2.3132795304475424,
"grad_norm": 0.6639583110809326,
"learning_rate": 1.5077961138904628e-06,
"loss": 0.5363,
"step": 1051
},
{
"epoch": 2.3154805575935438,
"grad_norm": 0.6760245561599731,
"learning_rate": 1.4986307228237268e-06,
"loss": 0.5239,
"step": 1052
},
{
"epoch": 2.317681584739545,
"grad_norm": 0.7063462138175964,
"learning_rate": 1.4894883602355808e-06,
"loss": 0.5385,
"step": 1053
},
{
"epoch": 2.3198826118855465,
"grad_norm": 0.6705374121665955,
"learning_rate": 1.4803690862552755e-06,
"loss": 0.5341,
"step": 1054
},
{
"epoch": 2.3220836390315482,
"grad_norm": 0.6862072944641113,
"learning_rate": 1.4712729608602062e-06,
"loss": 0.5115,
"step": 1055
},
{
"epoch": 2.3242846661775496,
"grad_norm": 0.677196741104126,
"learning_rate": 1.4622000438755157e-06,
"loss": 0.538,
"step": 1056
},
{
"epoch": 2.326485693323551,
"grad_norm": 0.6774702072143555,
"learning_rate": 1.4531503949737107e-06,
"loss": 0.5335,
"step": 1057
},
{
"epoch": 2.3286867204695523,
"grad_norm": 0.6992412805557251,
"learning_rate": 1.444124073674264e-06,
"loss": 0.5252,
"step": 1058
},
{
"epoch": 2.330887747615554,
"grad_norm": 0.6966509819030762,
"learning_rate": 1.4351211393432162e-06,
"loss": 0.5306,
"step": 1059
},
{
"epoch": 2.3330887747615554,
"grad_norm": 0.6871675252914429,
"learning_rate": 1.4261416511928012e-06,
"loss": 0.5355,
"step": 1060
},
{
"epoch": 2.3352898019075568,
"grad_norm": 0.6746228337287903,
"learning_rate": 1.4171856682810386e-06,
"loss": 0.5103,
"step": 1061
},
{
"epoch": 2.337490829053558,
"grad_norm": 0.6926625370979309,
"learning_rate": 1.4082532495113627e-06,
"loss": 0.5207,
"step": 1062
},
{
"epoch": 2.33969185619956,
"grad_norm": 0.6979734897613525,
"learning_rate": 1.3993444536322204e-06,
"loss": 0.5139,
"step": 1063
},
{
"epoch": 2.3418928833455612,
"grad_norm": 0.6511772871017456,
"learning_rate": 1.3904593392366916e-06,
"loss": 0.5128,
"step": 1064
},
{
"epoch": 2.3440939104915626,
"grad_norm": 0.6979479789733887,
"learning_rate": 1.3815979647621063e-06,
"loss": 0.522,
"step": 1065
},
{
"epoch": 2.3462949376375644,
"grad_norm": 0.6785731911659241,
"learning_rate": 1.3727603884896578e-06,
"loss": 0.519,
"step": 1066
},
{
"epoch": 2.3484959647835657,
"grad_norm": 0.7067009806632996,
"learning_rate": 1.3639466685440133e-06,
"loss": 0.5161,
"step": 1067
},
{
"epoch": 2.350696991929567,
"grad_norm": 0.6423737406730652,
"learning_rate": 1.3551568628929434e-06,
"loss": 0.5264,
"step": 1068
},
{
"epoch": 2.3528980190755684,
"grad_norm": 0.6567428112030029,
"learning_rate": 1.346391029346929e-06,
"loss": 0.5045,
"step": 1069
},
{
"epoch": 2.35509904622157,
"grad_norm": 0.6590611934661865,
"learning_rate": 1.3376492255587909e-06,
"loss": 0.5227,
"step": 1070
},
{
"epoch": 2.3573000733675715,
"grad_norm": 0.6755092144012451,
"learning_rate": 1.3289315090233056e-06,
"loss": 0.5133,
"step": 1071
},
{
"epoch": 2.359501100513573,
"grad_norm": 0.7399182319641113,
"learning_rate": 1.3202379370768254e-06,
"loss": 0.5295,
"step": 1072
},
{
"epoch": 2.3617021276595747,
"grad_norm": 0.6728177666664124,
"learning_rate": 1.3115685668969075e-06,
"loss": 0.5226,
"step": 1073
},
{
"epoch": 2.363903154805576,
"grad_norm": 0.6400392651557922,
"learning_rate": 1.3029234555019315e-06,
"loss": 0.5443,
"step": 1074
},
{
"epoch": 2.3661041819515773,
"grad_norm": 0.6435502767562866,
"learning_rate": 1.2943026597507268e-06,
"loss": 0.5207,
"step": 1075
},
{
"epoch": 2.3683052090975787,
"grad_norm": 0.6713318824768066,
"learning_rate": 1.2857062363422007e-06,
"loss": 0.5239,
"step": 1076
},
{
"epoch": 2.3705062362435805,
"grad_norm": 0.6874515414237976,
"learning_rate": 1.2771342418149658e-06,
"loss": 0.5302,
"step": 1077
},
{
"epoch": 2.372707263389582,
"grad_norm": 0.6752180457115173,
"learning_rate": 1.2685867325469603e-06,
"loss": 0.5261,
"step": 1078
},
{
"epoch": 2.374908290535583,
"grad_norm": 0.6637091040611267,
"learning_rate": 1.2600637647550889e-06,
"loss": 0.5309,
"step": 1079
},
{
"epoch": 2.377109317681585,
"grad_norm": 0.7188436388969421,
"learning_rate": 1.2515653944948424e-06,
"loss": 0.5198,
"step": 1080
},
{
"epoch": 2.3793103448275863,
"grad_norm": 0.6852547526359558,
"learning_rate": 1.2430916776599378e-06,
"loss": 0.4992,
"step": 1081
},
{
"epoch": 2.3815113719735876,
"grad_norm": 0.6739503145217896,
"learning_rate": 1.234642669981946e-06,
"loss": 0.5407,
"step": 1082
},
{
"epoch": 2.383712399119589,
"grad_norm": 0.6844504475593567,
"learning_rate": 1.2262184270299215e-06,
"loss": 0.5277,
"step": 1083
},
{
"epoch": 2.3859134262655908,
"grad_norm": 0.682556688785553,
"learning_rate": 1.217819004210049e-06,
"loss": 0.531,
"step": 1084
},
{
"epoch": 2.388114453411592,
"grad_norm": 0.6876649856567383,
"learning_rate": 1.2094444567652652e-06,
"loss": 0.5388,
"step": 1085
},
{
"epoch": 2.3903154805575935,
"grad_norm": 0.7046743631362915,
"learning_rate": 1.2010948397749022e-06,
"loss": 0.5294,
"step": 1086
},
{
"epoch": 2.3925165077035953,
"grad_norm": 0.690581202507019,
"learning_rate": 1.1927702081543279e-06,
"loss": 0.5268,
"step": 1087
},
{
"epoch": 2.3947175348495966,
"grad_norm": 0.6656055450439453,
"learning_rate": 1.1844706166545811e-06,
"loss": 0.5297,
"step": 1088
},
{
"epoch": 2.396918561995598,
"grad_norm": 0.677450954914093,
"learning_rate": 1.1761961198620081e-06,
"loss": 0.557,
"step": 1089
},
{
"epoch": 2.3991195891415993,
"grad_norm": 0.6376323103904724,
"learning_rate": 1.1679467721979132e-06,
"loss": 0.531,
"step": 1090
},
{
"epoch": 2.401320616287601,
"grad_norm": 0.6878024339675903,
"learning_rate": 1.159722627918189e-06,
"loss": 0.5307,
"step": 1091
},
{
"epoch": 2.4035216434336024,
"grad_norm": 0.7167958617210388,
"learning_rate": 1.1515237411129698e-06,
"loss": 0.5441,
"step": 1092
},
{
"epoch": 2.4057226705796038,
"grad_norm": 0.664085865020752,
"learning_rate": 1.1433501657062723e-06,
"loss": 0.5294,
"step": 1093
},
{
"epoch": 2.407923697725605,
"grad_norm": 0.6956909894943237,
"learning_rate": 1.135201955455636e-06,
"loss": 0.5271,
"step": 1094
},
{
"epoch": 2.410124724871607,
"grad_norm": 0.6617448329925537,
"learning_rate": 1.1270791639517786e-06,
"loss": 0.5388,
"step": 1095
},
{
"epoch": 2.4123257520176082,
"grad_norm": 0.6781307458877563,
"learning_rate": 1.118981844618236e-06,
"loss": 0.5258,
"step": 1096
},
{
"epoch": 2.4145267791636096,
"grad_norm": 0.6830940246582031,
"learning_rate": 1.1109100507110133e-06,
"loss": 0.5126,
"step": 1097
},
{
"epoch": 2.416727806309611,
"grad_norm": 0.6359250545501709,
"learning_rate": 1.1028638353182392e-06,
"loss": 0.5078,
"step": 1098
},
{
"epoch": 2.4189288334556127,
"grad_norm": 0.6963909268379211,
"learning_rate": 1.0948432513598073e-06,
"loss": 0.5104,
"step": 1099
},
{
"epoch": 2.421129860601614,
"grad_norm": 0.6965398788452148,
"learning_rate": 1.086848351587037e-06,
"loss": 0.5259,
"step": 1100
},
{
"epoch": 2.4233308877476154,
"grad_norm": 0.6796557903289795,
"learning_rate": 1.0788791885823236e-06,
"loss": 0.5237,
"step": 1101
},
{
"epoch": 2.425531914893617,
"grad_norm": 0.6518641114234924,
"learning_rate": 1.0709358147587883e-06,
"loss": 0.5442,
"step": 1102
},
{
"epoch": 2.4277329420396185,
"grad_norm": 0.6805407404899597,
"learning_rate": 1.06301828235994e-06,
"loss": 0.5367,
"step": 1103
},
{
"epoch": 2.42993396918562,
"grad_norm": 0.666123628616333,
"learning_rate": 1.0551266434593293e-06,
"loss": 0.5211,
"step": 1104
},
{
"epoch": 2.4321349963316212,
"grad_norm": 0.7070630788803101,
"learning_rate": 1.0472609499602017e-06,
"loss": 0.5584,
"step": 1105
},
{
"epoch": 2.434336023477623,
"grad_norm": 0.6542832851409912,
"learning_rate": 1.0394212535951642e-06,
"loss": 0.504,
"step": 1106
},
{
"epoch": 2.4365370506236244,
"grad_norm": 0.6602501273155212,
"learning_rate": 1.031607605925839e-06,
"loss": 0.4941,
"step": 1107
},
{
"epoch": 2.4387380777696257,
"grad_norm": 0.684209942817688,
"learning_rate": 1.023820058342524e-06,
"loss": 0.5213,
"step": 1108
},
{
"epoch": 2.4409391049156275,
"grad_norm": 0.6626037955284119,
"learning_rate": 1.016058662063862e-06,
"loss": 0.5352,
"step": 1109
},
{
"epoch": 2.443140132061629,
"grad_norm": 0.6796350479125977,
"learning_rate": 1.0083234681364934e-06,
"loss": 0.5353,
"step": 1110
},
{
"epoch": 2.44534115920763,
"grad_norm": 0.6874199509620667,
"learning_rate": 1.0006145274347306e-06,
"loss": 0.5167,
"step": 1111
},
{
"epoch": 2.4475421863536315,
"grad_norm": 0.7013456225395203,
"learning_rate": 9.929318906602176e-07,
"loss": 0.5249,
"step": 1112
},
{
"epoch": 2.4497432134996333,
"grad_norm": 0.686331570148468,
"learning_rate": 9.852756083415944e-07,
"loss": 0.5282,
"step": 1113
},
{
"epoch": 2.4519442406456347,
"grad_norm": 0.6578436493873596,
"learning_rate": 9.776457308341735e-07,
"loss": 0.5102,
"step": 1114
},
{
"epoch": 2.454145267791636,
"grad_norm": 0.7160325646400452,
"learning_rate": 9.70042308319597e-07,
"loss": 0.5332,
"step": 1115
},
{
"epoch": 2.456346294937638,
"grad_norm": 0.6882801651954651,
"learning_rate": 9.62465390805517e-07,
"loss": 0.4939,
"step": 1116
},
{
"epoch": 2.458547322083639,
"grad_norm": 0.6668509244918823,
"learning_rate": 9.549150281252633e-07,
"loss": 0.5222,
"step": 1117
},
{
"epoch": 2.4607483492296405,
"grad_norm": 0.6786191463470459,
"learning_rate": 9.473912699375093e-07,
"loss": 0.5791,
"step": 1118
},
{
"epoch": 2.462949376375642,
"grad_norm": 0.6751823425292969,
"learning_rate": 9.398941657259575e-07,
"loss": 0.5408,
"step": 1119
},
{
"epoch": 2.4651504035216436,
"grad_norm": 0.6627745032310486,
"learning_rate": 9.324237647990026e-07,
"loss": 0.5233,
"step": 1120
},
{
"epoch": 2.467351430667645,
"grad_norm": 0.6707773208618164,
"learning_rate": 9.249801162894123e-07,
"loss": 0.5335,
"step": 1121
},
{
"epoch": 2.4695524578136463,
"grad_norm": 0.6872125864028931,
"learning_rate": 9.175632691540065e-07,
"loss": 0.521,
"step": 1122
},
{
"epoch": 2.471753484959648,
"grad_norm": 0.6743152141571045,
"learning_rate": 9.101732721733325e-07,
"loss": 0.5388,
"step": 1123
},
{
"epoch": 2.4739545121056494,
"grad_norm": 0.6818563938140869,
"learning_rate": 9.028101739513406e-07,
"loss": 0.5533,
"step": 1124
},
{
"epoch": 2.4761555392516508,
"grad_norm": 0.6874618530273438,
"learning_rate": 8.954740229150732e-07,
"loss": 0.528,
"step": 1125
},
{
"epoch": 2.478356566397652,
"grad_norm": 0.6738033294677734,
"learning_rate": 8.881648673143367e-07,
"loss": 0.5086,
"step": 1126
},
{
"epoch": 2.4805575935436535,
"grad_norm": 0.6714388728141785,
"learning_rate": 8.808827552213917e-07,
"loss": 0.5324,
"step": 1127
},
{
"epoch": 2.4827586206896552,
"grad_norm": 0.6772189736366272,
"learning_rate": 8.736277345306343e-07,
"loss": 0.5265,
"step": 1128
},
{
"epoch": 2.4849596478356566,
"grad_norm": 0.6666496396064758,
"learning_rate": 8.663998529582768e-07,
"loss": 0.5367,
"step": 1129
},
{
"epoch": 2.487160674981658,
"grad_norm": 0.6771166324615479,
"learning_rate": 8.591991580420422e-07,
"loss": 0.5311,
"step": 1130
},
{
"epoch": 2.4893617021276597,
"grad_norm": 0.6526003479957581,
"learning_rate": 8.520256971408453e-07,
"loss": 0.5147,
"step": 1131
},
{
"epoch": 2.491562729273661,
"grad_norm": 0.6972113251686096,
"learning_rate": 8.448795174344803e-07,
"loss": 0.5223,
"step": 1132
},
{
"epoch": 2.4937637564196624,
"grad_norm": 0.6587761640548706,
"learning_rate": 8.377606659233179e-07,
"loss": 0.5466,
"step": 1133
},
{
"epoch": 2.4959647835656638,
"grad_norm": 0.6860453486442566,
"learning_rate": 8.306691894279894e-07,
"loss": 0.5382,
"step": 1134
},
{
"epoch": 2.4981658107116655,
"grad_norm": 0.659783661365509,
"learning_rate": 8.2360513458908e-07,
"loss": 0.5388,
"step": 1135
},
{
"epoch": 2.500366837857667,
"grad_norm": 0.6612713932991028,
"learning_rate": 8.16568547866824e-07,
"loss": 0.5081,
"step": 1136
},
{
"epoch": 2.5025678650036682,
"grad_norm": 0.677689254283905,
"learning_rate": 8.095594755407971e-07,
"loss": 0.5355,
"step": 1137
},
{
"epoch": 2.50476889214967,
"grad_norm": 0.6722758412361145,
"learning_rate": 8.025779637096138e-07,
"loss": 0.5202,
"step": 1138
},
{
"epoch": 2.5069699192956714,
"grad_norm": 0.6744994521141052,
"learning_rate": 7.956240582906244e-07,
"loss": 0.5294,
"step": 1139
},
{
"epoch": 2.5091709464416727,
"grad_norm": 0.6862902045249939,
"learning_rate": 7.886978050196093e-07,
"loss": 0.5271,
"step": 1140
},
{
"epoch": 2.511371973587674,
"grad_norm": 0.6579144597053528,
"learning_rate": 7.817992494504844e-07,
"loss": 0.5508,
"step": 1141
},
{
"epoch": 2.513573000733676,
"grad_norm": 0.656959056854248,
"learning_rate": 7.749284369549954e-07,
"loss": 0.5513,
"step": 1142
},
{
"epoch": 2.515774027879677,
"grad_norm": 0.6645578145980835,
"learning_rate": 7.680854127224213e-07,
"loss": 0.5438,
"step": 1143
},
{
"epoch": 2.5179750550256785,
"grad_norm": 0.6550148129463196,
"learning_rate": 7.612702217592816e-07,
"loss": 0.5491,
"step": 1144
},
{
"epoch": 2.5201760821716803,
"grad_norm": 0.6535680890083313,
"learning_rate": 7.544829088890326e-07,
"loss": 0.5389,
"step": 1145
},
{
"epoch": 2.5223771093176817,
"grad_norm": 0.6864288449287415,
"learning_rate": 7.477235187517795e-07,
"loss": 0.5661,
"step": 1146
},
{
"epoch": 2.524578136463683,
"grad_norm": 0.6564839482307434,
"learning_rate": 7.409920958039795e-07,
"loss": 0.5404,
"step": 1147
},
{
"epoch": 2.5267791636096844,
"grad_norm": 0.6699473857879639,
"learning_rate": 7.342886843181479e-07,
"loss": 0.5279,
"step": 1148
},
{
"epoch": 2.528980190755686,
"grad_norm": 0.6886346936225891,
"learning_rate": 7.276133283825698e-07,
"loss": 0.5563,
"step": 1149
},
{
"epoch": 2.5311812179016875,
"grad_norm": 0.648545503616333,
"learning_rate": 7.209660719010119e-07,
"loss": 0.5402,
"step": 1150
},
{
"epoch": 2.533382245047689,
"grad_norm": 0.674880862236023,
"learning_rate": 7.143469585924251e-07,
"loss": 0.5113,
"step": 1151
},
{
"epoch": 2.5355832721936906,
"grad_norm": 0.6864522099494934,
"learning_rate": 7.077560319906696e-07,
"loss": 0.5505,
"step": 1152
},
{
"epoch": 2.537784299339692,
"grad_norm": 0.6706079244613647,
"learning_rate": 7.011933354442168e-07,
"loss": 0.5417,
"step": 1153
},
{
"epoch": 2.5399853264856933,
"grad_norm": 0.6655914187431335,
"learning_rate": 6.946589121158703e-07,
"loss": 0.5128,
"step": 1154
},
{
"epoch": 2.5421863536316947,
"grad_norm": 0.6507923007011414,
"learning_rate": 6.881528049824837e-07,
"loss": 0.5477,
"step": 1155
},
{
"epoch": 2.544387380777696,
"grad_norm": 0.6709318161010742,
"learning_rate": 6.816750568346708e-07,
"loss": 0.5553,
"step": 1156
},
{
"epoch": 2.546588407923698,
"grad_norm": 0.6583144068717957,
"learning_rate": 6.752257102765325e-07,
"loss": 0.5338,
"step": 1157
},
{
"epoch": 2.548789435069699,
"grad_norm": 0.666934072971344,
"learning_rate": 6.688048077253712e-07,
"loss": 0.556,
"step": 1158
},
{
"epoch": 2.550990462215701,
"grad_norm": 0.6553151607513428,
"learning_rate": 6.624123914114122e-07,
"loss": 0.5132,
"step": 1159
},
{
"epoch": 2.5531914893617023,
"grad_norm": 0.6793770790100098,
"learning_rate": 6.560485033775299e-07,
"loss": 0.536,
"step": 1160
},
{
"epoch": 2.5553925165077036,
"grad_norm": 0.6784916520118713,
"learning_rate": 6.49713185478964e-07,
"loss": 0.5621,
"step": 1161
},
{
"epoch": 2.557593543653705,
"grad_norm": 0.6523611545562744,
"learning_rate": 6.43406479383053e-07,
"loss": 0.5293,
"step": 1162
},
{
"epoch": 2.5597945707997063,
"grad_norm": 0.683592677116394,
"learning_rate": 6.371284265689543e-07,
"loss": 0.5195,
"step": 1163
},
{
"epoch": 2.561995597945708,
"grad_norm": 0.6984937787055969,
"learning_rate": 6.308790683273719e-07,
"loss": 0.515,
"step": 1164
},
{
"epoch": 2.5641966250917094,
"grad_norm": 0.6644624471664429,
"learning_rate": 6.24658445760285e-07,
"loss": 0.5249,
"step": 1165
},
{
"epoch": 2.5663976522377108,
"grad_norm": 0.6549161076545715,
"learning_rate": 6.184665997806832e-07,
"loss": 0.5183,
"step": 1166
},
{
"epoch": 2.5685986793837126,
"grad_norm": 0.6416719555854797,
"learning_rate": 6.12303571112286e-07,
"loss": 0.5313,
"step": 1167
},
{
"epoch": 2.570799706529714,
"grad_norm": 0.6797274947166443,
"learning_rate": 6.06169400289287e-07,
"loss": 0.5366,
"step": 1168
},
{
"epoch": 2.5730007336757152,
"grad_norm": 0.6692672371864319,
"learning_rate": 6.000641276560814e-07,
"loss": 0.5256,
"step": 1169
},
{
"epoch": 2.5752017608217166,
"grad_norm": 0.6283913254737854,
"learning_rate": 5.93987793366998e-07,
"loss": 0.5436,
"step": 1170
},
{
"epoch": 2.5774027879677184,
"grad_norm": 0.6609349250793457,
"learning_rate": 5.879404373860415e-07,
"loss": 0.5457,
"step": 1171
},
{
"epoch": 2.5796038151137197,
"grad_norm": 0.6875924468040466,
"learning_rate": 5.819220994866237e-07,
"loss": 0.5549,
"step": 1172
},
{
"epoch": 2.581804842259721,
"grad_norm": 0.6782329082489014,
"learning_rate": 5.759328192513075e-07,
"loss": 0.5226,
"step": 1173
},
{
"epoch": 2.584005869405723,
"grad_norm": 0.6453458070755005,
"learning_rate": 5.699726360715435e-07,
"loss": 0.5198,
"step": 1174
},
{
"epoch": 2.586206896551724,
"grad_norm": 0.6338275074958801,
"learning_rate": 5.640415891474094e-07,
"loss": 0.5397,
"step": 1175
},
{
"epoch": 2.5884079236977255,
"grad_norm": 0.6670948266983032,
"learning_rate": 5.581397174873532e-07,
"loss": 0.5307,
"step": 1176
},
{
"epoch": 2.590608950843727,
"grad_norm": 0.6600770354270935,
"learning_rate": 5.522670599079416e-07,
"loss": 0.5148,
"step": 1177
},
{
"epoch": 2.5928099779897287,
"grad_norm": 0.6838109493255615,
"learning_rate": 5.464236550335961e-07,
"loss": 0.5501,
"step": 1178
},
{
"epoch": 2.59501100513573,
"grad_norm": 0.659791886806488,
"learning_rate": 5.406095412963464e-07,
"loss": 0.5389,
"step": 1179
},
{
"epoch": 2.5972120322817314,
"grad_norm": 0.6598541736602783,
"learning_rate": 5.348247569355736e-07,
"loss": 0.5416,
"step": 1180
},
{
"epoch": 2.599413059427733,
"grad_norm": 0.6551116108894348,
"learning_rate": 5.290693399977581e-07,
"loss": 0.5025,
"step": 1181
},
{
"epoch": 2.6016140865737345,
"grad_norm": 0.674087643623352,
"learning_rate": 5.233433283362349e-07,
"loss": 0.5193,
"step": 1182
},
{
"epoch": 2.603815113719736,
"grad_norm": 0.6917135715484619,
"learning_rate": 5.176467596109358e-07,
"loss": 0.5409,
"step": 1183
},
{
"epoch": 2.606016140865737,
"grad_norm": 0.6740174889564514,
"learning_rate": 5.119796712881498e-07,
"loss": 0.5463,
"step": 1184
},
{
"epoch": 2.608217168011739,
"grad_norm": 0.645552396774292,
"learning_rate": 5.063421006402747e-07,
"loss": 0.5364,
"step": 1185
},
{
"epoch": 2.6104181951577403,
"grad_norm": 0.6733747720718384,
"learning_rate": 5.007340847455667e-07,
"loss": 0.5381,
"step": 1186
},
{
"epoch": 2.6126192223037417,
"grad_norm": 0.6768305897712708,
"learning_rate": 4.951556604879049e-07,
"loss": 0.5464,
"step": 1187
},
{
"epoch": 2.6148202494497435,
"grad_norm": 0.6372972726821899,
"learning_rate": 4.896068645565405e-07,
"loss": 0.5398,
"step": 1188
},
{
"epoch": 2.617021276595745,
"grad_norm": 0.6610989570617676,
"learning_rate": 4.840877334458615e-07,
"loss": 0.5352,
"step": 1189
},
{
"epoch": 2.619222303741746,
"grad_norm": 0.6962060928344727,
"learning_rate": 4.785983034551523e-07,
"loss": 0.5098,
"step": 1190
},
{
"epoch": 2.6214233308877475,
"grad_norm": 0.674565851688385,
"learning_rate": 4.731386106883484e-07,
"loss": 0.5296,
"step": 1191
},
{
"epoch": 2.623624358033749,
"grad_norm": 0.6853401064872742,
"learning_rate": 4.677086910538092e-07,
"loss": 0.5241,
"step": 1192
},
{
"epoch": 2.6258253851797506,
"grad_norm": 0.6350346207618713,
"learning_rate": 4.6230858026407364e-07,
"loss": 0.5543,
"step": 1193
},
{
"epoch": 2.628026412325752,
"grad_norm": 0.6491067409515381,
"learning_rate": 4.569383138356276e-07,
"loss": 0.5356,
"step": 1194
},
{
"epoch": 2.6302274394717537,
"grad_norm": 0.6713163256645203,
"learning_rate": 4.515979270886728e-07,
"loss": 0.5428,
"step": 1195
},
{
"epoch": 2.632428466617755,
"grad_norm": 0.6898890733718872,
"learning_rate": 4.4628745514689154e-07,
"loss": 0.5212,
"step": 1196
},
{
"epoch": 2.6346294937637564,
"grad_norm": 0.7005847692489624,
"learning_rate": 4.410069329372152e-07,
"loss": 0.4808,
"step": 1197
},
{
"epoch": 2.636830520909758,
"grad_norm": 0.6773681044578552,
"learning_rate": 4.357563951895988e-07,
"loss": 0.5139,
"step": 1198
},
{
"epoch": 2.639031548055759,
"grad_norm": 0.6664858460426331,
"learning_rate": 4.305358764367884e-07,
"loss": 0.5483,
"step": 1199
},
{
"epoch": 2.641232575201761,
"grad_norm": 0.677723228931427,
"learning_rate": 4.253454110140942e-07,
"loss": 0.5246,
"step": 1200
},
{
"epoch": 2.6434336023477623,
"grad_norm": 0.6912497282028198,
"learning_rate": 4.201850330591678e-07,
"loss": 0.5323,
"step": 1201
},
{
"epoch": 2.6456346294937636,
"grad_norm": 0.6718654632568359,
"learning_rate": 4.150547765117746e-07,
"loss": 0.5445,
"step": 1202
},
{
"epoch": 2.6478356566397654,
"grad_norm": 0.6588461995124817,
"learning_rate": 4.0995467511357246e-07,
"loss": 0.5249,
"step": 1203
},
{
"epoch": 2.6500366837857667,
"grad_norm": 0.6795045137405396,
"learning_rate": 4.0488476240789e-07,
"loss": 0.5325,
"step": 1204
},
{
"epoch": 2.652237710931768,
"grad_norm": 0.6509596109390259,
"learning_rate": 3.9984507173950136e-07,
"loss": 0.5404,
"step": 1205
},
{
"epoch": 2.6544387380777694,
"grad_norm": 0.6590747833251953,
"learning_rate": 3.9483563625441424e-07,
"loss": 0.5273,
"step": 1206
},
{
"epoch": 2.656639765223771,
"grad_norm": 0.6651404500007629,
"learning_rate": 3.8985648889964755e-07,
"loss": 0.5234,
"step": 1207
},
{
"epoch": 2.6588407923697726,
"grad_norm": 0.6637810468673706,
"learning_rate": 3.8490766242301356e-07,
"loss": 0.5445,
"step": 1208
},
{
"epoch": 2.661041819515774,
"grad_norm": 0.6318773627281189,
"learning_rate": 3.7998918937290686e-07,
"loss": 0.5018,
"step": 1209
},
{
"epoch": 2.6632428466617757,
"grad_norm": 0.6627768278121948,
"learning_rate": 3.7510110209808657e-07,
"loss": 0.5078,
"step": 1210
},
{
"epoch": 2.665443873807777,
"grad_norm": 0.6809104084968567,
"learning_rate": 3.70243432747463e-07,
"loss": 0.5114,
"step": 1211
},
{
"epoch": 2.6676449009537784,
"grad_norm": 0.6351826190948486,
"learning_rate": 3.6541621326989183e-07,
"loss": 0.5114,
"step": 1212
},
{
"epoch": 2.6698459280997797,
"grad_norm": 0.661004900932312,
"learning_rate": 3.606194754139569e-07,
"loss": 0.5147,
"step": 1213
},
{
"epoch": 2.6720469552457815,
"grad_norm": 0.6592134833335876,
"learning_rate": 3.5585325072776625e-07,
"loss": 0.5232,
"step": 1214
},
{
"epoch": 2.674247982391783,
"grad_norm": 0.6718369722366333,
"learning_rate": 3.511175705587433e-07,
"loss": 0.5013,
"step": 1215
},
{
"epoch": 2.676449009537784,
"grad_norm": 0.6566296815872192,
"learning_rate": 3.464124660534191e-07,
"loss": 0.508,
"step": 1216
},
{
"epoch": 2.678650036683786,
"grad_norm": 0.6743835210800171,
"learning_rate": 3.417379681572297e-07,
"loss": 0.5518,
"step": 1217
},
{
"epoch": 2.6808510638297873,
"grad_norm": 0.6525498628616333,
"learning_rate": 3.3709410761431136e-07,
"loss": 0.5478,
"step": 1218
},
{
"epoch": 2.6830520909757887,
"grad_norm": 0.6631609201431274,
"learning_rate": 3.324809149672992e-07,
"loss": 0.516,
"step": 1219
},
{
"epoch": 2.68525311812179,
"grad_norm": 0.6674198508262634,
"learning_rate": 3.278984205571262e-07,
"loss": 0.5316,
"step": 1220
},
{
"epoch": 2.6874541452677914,
"grad_norm": 0.667265772819519,
"learning_rate": 3.2334665452282143e-07,
"loss": 0.526,
"step": 1221
},
{
"epoch": 2.689655172413793,
"grad_norm": 0.6764647364616394,
"learning_rate": 3.18825646801314e-07,
"loss": 0.5334,
"step": 1222
},
{
"epoch": 2.6918561995597945,
"grad_norm": 0.6340264081954956,
"learning_rate": 3.143354271272392e-07,
"loss": 0.5303,
"step": 1223
},
{
"epoch": 2.6940572267057963,
"grad_norm": 0.6302101612091064,
"learning_rate": 3.098760250327343e-07,
"loss": 0.5026,
"step": 1224
},
{
"epoch": 2.6962582538517976,
"grad_norm": 0.6636278033256531,
"learning_rate": 3.054474698472537e-07,
"loss": 0.5153,
"step": 1225
},
{
"epoch": 2.698459280997799,
"grad_norm": 0.670258641242981,
"learning_rate": 3.010497906973714e-07,
"loss": 0.5293,
"step": 1226
},
{
"epoch": 2.7006603081438003,
"grad_norm": 0.6790749430656433,
"learning_rate": 2.966830165065876e-07,
"loss": 0.5262,
"step": 1227
},
{
"epoch": 2.7028613352898017,
"grad_norm": 0.64798504114151,
"learning_rate": 2.92347175995143e-07,
"loss": 0.5474,
"step": 1228
},
{
"epoch": 2.7050623624358034,
"grad_norm": 0.6917140483856201,
"learning_rate": 2.8804229767982637e-07,
"loss": 0.5123,
"step": 1229
},
{
"epoch": 2.707263389581805,
"grad_norm": 0.6865553855895996,
"learning_rate": 2.837684098737892e-07,
"loss": 0.5364,
"step": 1230
},
{
"epoch": 2.7094644167278066,
"grad_norm": 0.6665230989456177,
"learning_rate": 2.795255406863595e-07,
"loss": 0.5295,
"step": 1231
},
{
"epoch": 2.711665443873808,
"grad_norm": 0.6777942180633545,
"learning_rate": 2.7531371802285436e-07,
"loss": 0.5274,
"step": 1232
},
{
"epoch": 2.7138664710198093,
"grad_norm": 0.6679130792617798,
"learning_rate": 2.711329695843978e-07,
"loss": 0.5378,
"step": 1233
},
{
"epoch": 2.7160674981658106,
"grad_norm": 0.6932915449142456,
"learning_rate": 2.6698332286774153e-07,
"loss": 0.5421,
"step": 1234
},
{
"epoch": 2.718268525311812,
"grad_norm": 0.6876216530799866,
"learning_rate": 2.628648051650784e-07,
"loss": 0.5285,
"step": 1235
},
{
"epoch": 2.7204695524578137,
"grad_norm": 0.6684898734092712,
"learning_rate": 2.587774435638679e-07,
"loss": 0.5378,
"step": 1236
},
{
"epoch": 2.722670579603815,
"grad_norm": 0.6850000619888306,
"learning_rate": 2.547212649466568e-07,
"loss": 0.5466,
"step": 1237
},
{
"epoch": 2.7248716067498164,
"grad_norm": 0.6805734038352966,
"learning_rate": 2.5069629599089874e-07,
"loss": 0.5246,
"step": 1238
},
{
"epoch": 2.727072633895818,
"grad_norm": 0.64240962266922,
"learning_rate": 2.467025631687847e-07,
"loss": 0.5561,
"step": 1239
},
{
"epoch": 2.7292736610418196,
"grad_norm": 0.6709468364715576,
"learning_rate": 2.4274009274706244e-07,
"loss": 0.5271,
"step": 1240
},
{
"epoch": 2.731474688187821,
"grad_norm": 0.6831678748130798,
"learning_rate": 2.388089107868713e-07,
"loss": 0.5402,
"step": 1241
},
{
"epoch": 2.7336757153338223,
"grad_norm": 0.6602542996406555,
"learning_rate": 2.3490904314356412e-07,
"loss": 0.5229,
"step": 1242
},
{
"epoch": 2.735876742479824,
"grad_norm": 0.6660738587379456,
"learning_rate": 2.3104051546654016e-07,
"loss": 0.5268,
"step": 1243
},
{
"epoch": 2.7380777696258254,
"grad_norm": 0.680838406085968,
"learning_rate": 2.2720335319907472e-07,
"loss": 0.5466,
"step": 1244
},
{
"epoch": 2.7402787967718267,
"grad_norm": 0.6857189536094666,
"learning_rate": 2.2339758157815583e-07,
"loss": 0.4983,
"step": 1245
},
{
"epoch": 2.7424798239178285,
"grad_norm": 0.6480759382247925,
"learning_rate": 2.1962322563431283e-07,
"loss": 0.5103,
"step": 1246
},
{
"epoch": 2.74468085106383,
"grad_norm": 0.6562944054603577,
"learning_rate": 2.1588031019145638e-07,
"loss": 0.5257,
"step": 1247
},
{
"epoch": 2.746881878209831,
"grad_norm": 0.6717230081558228,
"learning_rate": 2.1216885986671155e-07,
"loss": 0.524,
"step": 1248
},
{
"epoch": 2.7490829053558326,
"grad_norm": 0.6565597057342529,
"learning_rate": 2.0848889907025883e-07,
"loss": 0.5382,
"step": 1249
},
{
"epoch": 2.7512839325018343,
"grad_norm": 0.6873770952224731,
"learning_rate": 2.0484045200517222e-07,
"loss": 0.5371,
"step": 1250
},
{
"epoch": 2.7534849596478357,
"grad_norm": 0.6550769805908203,
"learning_rate": 2.0122354266725874e-07,
"loss": 0.555,
"step": 1251
},
{
"epoch": 2.755685986793837,
"grad_norm": 0.667922854423523,
"learning_rate": 1.9763819484490353e-07,
"loss": 0.527,
"step": 1252
},
{
"epoch": 2.757887013939839,
"grad_norm": 0.6662009358406067,
"learning_rate": 1.9408443211891227e-07,
"loss": 0.5218,
"step": 1253
},
{
"epoch": 2.76008804108584,
"grad_norm": 0.6917633414268494,
"learning_rate": 1.9056227786235337e-07,
"loss": 0.5593,
"step": 1254
},
{
"epoch": 2.7622890682318415,
"grad_norm": 0.6952189207077026,
"learning_rate": 1.8707175524040998e-07,
"loss": 0.4856,
"step": 1255
},
{
"epoch": 2.764490095377843,
"grad_norm": 0.6845850348472595,
"learning_rate": 1.8361288721022053e-07,
"loss": 0.5288,
"step": 1256
},
{
"epoch": 2.766691122523844,
"grad_norm": 0.6552596688270569,
"learning_rate": 1.801856965207338e-07,
"loss": 0.5406,
"step": 1257
},
{
"epoch": 2.768892149669846,
"grad_norm": 0.6877692937850952,
"learning_rate": 1.76790205712557e-07,
"loss": 0.5183,
"step": 1258
},
{
"epoch": 2.7710931768158473,
"grad_norm": 0.6700577735900879,
"learning_rate": 1.7342643711780516e-07,
"loss": 0.5258,
"step": 1259
},
{
"epoch": 2.773294203961849,
"grad_norm": 0.6772248148918152,
"learning_rate": 1.700944128599602e-07,
"loss": 0.5303,
"step": 1260
},
{
"epoch": 2.7754952311078505,
"grad_norm": 0.6630265712738037,
"learning_rate": 1.667941548537194e-07,
"loss": 0.5395,
"step": 1261
},
{
"epoch": 2.777696258253852,
"grad_norm": 0.6755715608596802,
"learning_rate": 1.6352568480485277e-07,
"loss": 0.5104,
"step": 1262
},
{
"epoch": 2.779897285399853,
"grad_norm": 0.6613523364067078,
"learning_rate": 1.6028902421006464e-07,
"loss": 0.5245,
"step": 1263
},
{
"epoch": 2.7820983125458545,
"grad_norm": 0.676965057849884,
"learning_rate": 1.5708419435684463e-07,
"loss": 0.5297,
"step": 1264
},
{
"epoch": 2.7842993396918563,
"grad_norm": 0.6607319116592407,
"learning_rate": 1.5391121632333473e-07,
"loss": 0.5194,
"step": 1265
},
{
"epoch": 2.7865003668378576,
"grad_norm": 0.6863727569580078,
"learning_rate": 1.5077011097818729e-07,
"loss": 0.5452,
"step": 1266
},
{
"epoch": 2.7887013939838594,
"grad_norm": 0.6854506134986877,
"learning_rate": 1.4766089898042678e-07,
"loss": 0.5229,
"step": 1267
},
{
"epoch": 2.7909024211298608,
"grad_norm": 0.6733106374740601,
"learning_rate": 1.4458360077931721e-07,
"loss": 0.5321,
"step": 1268
},
{
"epoch": 2.793103448275862,
"grad_norm": 0.6609638333320618,
"learning_rate": 1.4153823661422485e-07,
"loss": 0.5228,
"step": 1269
},
{
"epoch": 2.7953044754218634,
"grad_norm": 0.6686742305755615,
"learning_rate": 1.3852482651448618e-07,
"loss": 0.566,
"step": 1270
},
{
"epoch": 2.797505502567865,
"grad_norm": 0.6608723998069763,
"learning_rate": 1.3554339029927532e-07,
"loss": 0.5514,
"step": 1271
},
{
"epoch": 2.7997065297138666,
"grad_norm": 0.6472584009170532,
"learning_rate": 1.3259394757747678e-07,
"loss": 0.5101,
"step": 1272
},
{
"epoch": 2.801907556859868,
"grad_norm": 0.6851097941398621,
"learning_rate": 1.2967651774755065e-07,
"loss": 0.5177,
"step": 1273
},
{
"epoch": 2.8041085840058693,
"grad_norm": 0.6754056811332703,
"learning_rate": 1.26791119997412e-07,
"loss": 0.5292,
"step": 1274
},
{
"epoch": 2.806309611151871,
"grad_norm": 0.6806917786598206,
"learning_rate": 1.2393777330429791e-07,
"loss": 0.5386,
"step": 1275
},
{
"epoch": 2.8085106382978724,
"grad_norm": 0.6873478293418884,
"learning_rate": 1.2111649643464785e-07,
"loss": 0.5351,
"step": 1276
},
{
"epoch": 2.8107116654438737,
"grad_norm": 0.6645894646644592,
"learning_rate": 1.1832730794397951e-07,
"loss": 0.5617,
"step": 1277
},
{
"epoch": 2.812912692589875,
"grad_norm": 0.6530945301055908,
"learning_rate": 1.1557022617676217e-07,
"loss": 0.548,
"step": 1278
},
{
"epoch": 2.815113719735877,
"grad_norm": 0.6693723201751709,
"learning_rate": 1.1284526926630124e-07,
"loss": 0.5367,
"step": 1279
},
{
"epoch": 2.817314746881878,
"grad_norm": 0.6465650200843811,
"learning_rate": 1.1015245513461837e-07,
"loss": 0.5192,
"step": 1280
},
{
"epoch": 2.8195157740278796,
"grad_norm": 0.6671846508979797,
"learning_rate": 1.0749180149233041e-07,
"loss": 0.5359,
"step": 1281
},
{
"epoch": 2.8217168011738814,
"grad_norm": 0.6585673689842224,
"learning_rate": 1.0486332583853565e-07,
"loss": 0.5268,
"step": 1282
},
{
"epoch": 2.8239178283198827,
"grad_norm": 0.6790010333061218,
"learning_rate": 1.0226704546069832e-07,
"loss": 0.5631,
"step": 1283
},
{
"epoch": 2.826118855465884,
"grad_norm": 0.6827207803726196,
"learning_rate": 9.970297743453484e-08,
"loss": 0.5276,
"step": 1284
},
{
"epoch": 2.8283198826118854,
"grad_norm": 0.6683819890022278,
"learning_rate": 9.717113862389993e-08,
"loss": 0.5365,
"step": 1285
},
{
"epoch": 2.830520909757887,
"grad_norm": 0.6550766825675964,
"learning_rate": 9.467154568067849e-08,
"loss": 0.5324,
"step": 1286
},
{
"epoch": 2.8327219369038885,
"grad_norm": 0.6635034084320068,
"learning_rate": 9.22042150446728e-08,
"loss": 0.5116,
"step": 1287
},
{
"epoch": 2.83492296404989,
"grad_norm": 0.6637604236602783,
"learning_rate": 8.976916294349935e-08,
"loss": 0.5152,
"step": 1288
},
{
"epoch": 2.8371239911958916,
"grad_norm": 0.6546451449394226,
"learning_rate": 8.736640539247498e-08,
"loss": 0.5314,
"step": 1289
},
{
"epoch": 2.839325018341893,
"grad_norm": 0.6624500155448914,
"learning_rate": 8.499595819451811e-08,
"loss": 0.5092,
"step": 1290
},
{
"epoch": 2.8415260454878943,
"grad_norm": 0.6892501711845398,
"learning_rate": 8.265783694004214e-08,
"loss": 0.5349,
"step": 1291
},
{
"epoch": 2.8437270726338957,
"grad_norm": 0.6518153548240662,
"learning_rate": 8.035205700685167e-08,
"loss": 0.5485,
"step": 1292
},
{
"epoch": 2.845928099779897,
"grad_norm": 0.6721088290214539,
"learning_rate": 7.80786335600442e-08,
"loss": 0.5432,
"step": 1293
},
{
"epoch": 2.848129126925899,
"grad_norm": 0.6561071872711182,
"learning_rate": 7.58375815519069e-08,
"loss": 0.5001,
"step": 1294
},
{
"epoch": 2.8503301540719,
"grad_norm": 0.6741259694099426,
"learning_rate": 7.362891572182284e-08,
"loss": 0.5177,
"step": 1295
},
{
"epoch": 2.852531181217902,
"grad_norm": 0.6415325999259949,
"learning_rate": 7.145265059616934e-08,
"loss": 0.5226,
"step": 1296
},
{
"epoch": 2.8547322083639033,
"grad_norm": 0.700303852558136,
"learning_rate": 6.930880048822531e-08,
"loss": 0.5223,
"step": 1297
},
{
"epoch": 2.8569332355099046,
"grad_norm": 0.6644274592399597,
"learning_rate": 6.71973794980757e-08,
"loss": 0.5553,
"step": 1298
},
{
"epoch": 2.859134262655906,
"grad_norm": 0.6964889764785767,
"learning_rate": 6.511840151252169e-08,
"loss": 0.5346,
"step": 1299
},
{
"epoch": 2.8613352898019073,
"grad_norm": 0.6516728401184082,
"learning_rate": 6.307188020498401e-08,
"loss": 0.5161,
"step": 1300
},
{
"epoch": 2.863536316947909,
"grad_norm": 0.7084875106811523,
"learning_rate": 6.105782903541746e-08,
"loss": 0.5439,
"step": 1301
},
{
"epoch": 2.8657373440939105,
"grad_norm": 0.6466243863105774,
"learning_rate": 5.907626125022159e-08,
"loss": 0.5285,
"step": 1302
},
{
"epoch": 2.867938371239912,
"grad_norm": 0.6607745289802551,
"learning_rate": 5.712718988215182e-08,
"loss": 0.5352,
"step": 1303
},
{
"epoch": 2.8701393983859136,
"grad_norm": 0.6683618426322937,
"learning_rate": 5.521062775023567e-08,
"loss": 0.5071,
"step": 1304
},
{
"epoch": 2.872340425531915,
"grad_norm": 0.643132746219635,
"learning_rate": 5.3326587459687774e-08,
"loss": 0.5205,
"step": 1305
},
{
"epoch": 2.8745414526779163,
"grad_norm": 0.6603211760520935,
"learning_rate": 5.1475081401825553e-08,
"loss": 0.5508,
"step": 1306
},
{
"epoch": 2.8767424798239176,
"grad_norm": 0.6604293584823608,
"learning_rate": 4.9656121753990924e-08,
"loss": 0.5152,
"step": 1307
},
{
"epoch": 2.8789435069699194,
"grad_norm": 0.6662620902061462,
"learning_rate": 4.7869720479466475e-08,
"loss": 0.5581,
"step": 1308
},
{
"epoch": 2.8811445341159208,
"grad_norm": 0.6571532487869263,
"learning_rate": 4.611588932740107e-08,
"loss": 0.5288,
"step": 1309
},
{
"epoch": 2.883345561261922,
"grad_norm": 0.6750805974006653,
"learning_rate": 4.439463983272663e-08,
"loss": 0.546,
"step": 1310
},
{
"epoch": 2.885546588407924,
"grad_norm": 0.6604536175727844,
"learning_rate": 4.270598331608977e-08,
"loss": 0.5204,
"step": 1311
},
{
"epoch": 2.8877476155539252,
"grad_norm": 0.6560507416725159,
"learning_rate": 4.104993088376974e-08,
"loss": 0.5414,
"step": 1312
},
{
"epoch": 2.8899486426999266,
"grad_norm": 0.6712255477905273,
"learning_rate": 3.9426493427611177e-08,
"loss": 0.5302,
"step": 1313
},
{
"epoch": 2.892149669845928,
"grad_norm": 0.6827952265739441,
"learning_rate": 3.7835681624949216e-08,
"loss": 0.5247,
"step": 1314
},
{
"epoch": 2.8943506969919297,
"grad_norm": 0.6624239087104797,
"learning_rate": 3.6277505938541735e-08,
"loss": 0.5265,
"step": 1315
},
{
"epoch": 2.896551724137931,
"grad_norm": 0.6647094488143921,
"learning_rate": 3.475197661649665e-08,
"loss": 0.5421,
"step": 1316
},
{
"epoch": 2.8987527512839324,
"grad_norm": 0.6717029809951782,
"learning_rate": 3.325910369220975e-08,
"loss": 0.5361,
"step": 1317
},
{
"epoch": 2.900953778429934,
"grad_norm": 0.6782132983207703,
"learning_rate": 3.179889698429473e-08,
"loss": 0.501,
"step": 1318
},
{
"epoch": 2.9031548055759355,
"grad_norm": 0.6535486578941345,
"learning_rate": 3.037136609651881e-08,
"loss": 0.5327,
"step": 1319
},
{
"epoch": 2.905355832721937,
"grad_norm": 0.6596401333808899,
"learning_rate": 2.8976520417742794e-08,
"loss": 0.5283,
"step": 1320
},
{
"epoch": 2.907556859867938,
"grad_norm": 0.6687381863594055,
"learning_rate": 2.7614369121854444e-08,
"loss": 0.5433,
"step": 1321
},
{
"epoch": 2.90975788701394,
"grad_norm": 0.6421722769737244,
"learning_rate": 2.6284921167712975e-08,
"loss": 0.5593,
"step": 1322
},
{
"epoch": 2.9119589141599413,
"grad_norm": 0.6447445154190063,
"learning_rate": 2.4988185299087973e-08,
"loss": 0.5325,
"step": 1323
},
{
"epoch": 2.9141599413059427,
"grad_norm": 0.6399628520011902,
"learning_rate": 2.3724170044600036e-08,
"loss": 0.5459,
"step": 1324
},
{
"epoch": 2.9163609684519445,
"grad_norm": 0.650874674320221,
"learning_rate": 2.2492883717668557e-08,
"loss": 0.5393,
"step": 1325
},
{
"epoch": 2.918561995597946,
"grad_norm": 0.6730715036392212,
"learning_rate": 2.1294334416453456e-08,
"loss": 0.518,
"step": 1326
},
{
"epoch": 2.920763022743947,
"grad_norm": 0.6591706871986389,
"learning_rate": 2.012853002380466e-08,
"loss": 0.5363,
"step": 1327
},
{
"epoch": 2.9229640498899485,
"grad_norm": 0.6662129759788513,
"learning_rate": 1.899547820720882e-08,
"loss": 0.5179,
"step": 1328
},
{
"epoch": 2.92516507703595,
"grad_norm": 0.669715404510498,
"learning_rate": 1.7895186418738773e-08,
"loss": 0.5414,
"step": 1329
},
{
"epoch": 2.9273661041819516,
"grad_norm": 0.6548400521278381,
"learning_rate": 1.6827661895004176e-08,
"loss": 0.5362,
"step": 1330
},
{
"epoch": 2.929567131327953,
"grad_norm": 0.6878491044044495,
"learning_rate": 1.5792911657107057e-08,
"loss": 0.5565,
"step": 1331
},
{
"epoch": 2.9317681584739548,
"grad_norm": 0.6532850861549377,
"learning_rate": 1.4790942510590767e-08,
"loss": 0.5064,
"step": 1332
},
{
"epoch": 2.933969185619956,
"grad_norm": 0.6462960839271545,
"learning_rate": 1.382176104539834e-08,
"loss": 0.5216,
"step": 1333
},
{
"epoch": 2.9361702127659575,
"grad_norm": 0.6616500020027161,
"learning_rate": 1.2885373635829756e-08,
"loss": 0.5209,
"step": 1334
},
{
"epoch": 2.938371239911959,
"grad_norm": 0.6529484391212463,
"learning_rate": 1.1981786440497523e-08,
"loss": 0.5328,
"step": 1335
},
{
"epoch": 2.94057226705796,
"grad_norm": 0.6724916696548462,
"learning_rate": 1.1111005402286712e-08,
"loss": 0.5244,
"step": 1336
},
{
"epoch": 2.942773294203962,
"grad_norm": 0.6753798127174377,
"learning_rate": 1.0273036248318325e-08,
"loss": 0.5369,
"step": 1337
},
{
"epoch": 2.9449743213499633,
"grad_norm": 0.6889106035232544,
"learning_rate": 9.467884489908763e-09,
"loss": 0.4993,
"step": 1338
},
{
"epoch": 2.9471753484959646,
"grad_norm": 0.6721296906471252,
"learning_rate": 8.695555422534863e-09,
"loss": 0.5455,
"step": 1339
},
{
"epoch": 2.9493763756419664,
"grad_norm": 0.6582779288291931,
"learning_rate": 7.956054125798917e-09,
"loss": 0.5015,
"step": 1340
},
{
"epoch": 2.9515774027879678,
"grad_norm": 0.6632301211357117,
"learning_rate": 7.249385463395375e-09,
"loss": 0.5495,
"step": 1341
},
{
"epoch": 2.953778429933969,
"grad_norm": 0.6691292524337769,
"learning_rate": 6.575554083078084e-09,
"loss": 0.5254,
"step": 1342
},
{
"epoch": 2.9559794570799705,
"grad_norm": 0.647284209728241,
"learning_rate": 5.934564416631427e-09,
"loss": 0.5184,
"step": 1343
},
{
"epoch": 2.9581804842259722,
"grad_norm": 0.671001136302948,
"learning_rate": 5.3264206798392395e-09,
"loss": 0.5225,
"step": 1344
},
{
"epoch": 2.9603815113719736,
"grad_norm": 0.6739072799682617,
"learning_rate": 4.751126872458156e-09,
"loss": 0.5373,
"step": 1345
},
{
"epoch": 2.962582538517975,
"grad_norm": 0.661592960357666,
"learning_rate": 4.208686778190974e-09,
"loss": 0.5313,
"step": 1346
},
{
"epoch": 2.9647835656639767,
"grad_norm": 0.6584525108337402,
"learning_rate": 3.6991039646616657e-09,
"loss": 0.5065,
"step": 1347
},
{
"epoch": 2.966984592809978,
"grad_norm": 0.6595169305801392,
"learning_rate": 3.2223817833931803e-09,
"loss": 0.537,
"step": 1348
},
{
"epoch": 2.9691856199559794,
"grad_norm": 0.672909140586853,
"learning_rate": 2.7785233697835702e-09,
"loss": 0.503,
"step": 1349
},
{
"epoch": 2.9713866471019807,
"grad_norm": 0.681883692741394,
"learning_rate": 2.367531643085452e-09,
"loss": 0.5301,
"step": 1350
},
{
"epoch": 2.9735876742479825,
"grad_norm": 0.6718893051147461,
"learning_rate": 1.989409306388801e-09,
"loss": 0.554,
"step": 1351
},
{
"epoch": 2.975788701393984,
"grad_norm": 0.6698089838027954,
"learning_rate": 1.6441588466009627e-09,
"loss": 0.5156,
"step": 1352
},
{
"epoch": 2.9779897285399852,
"grad_norm": 0.6642057299613953,
"learning_rate": 1.3317825344316692e-09,
"loss": 0.5235,
"step": 1353
},
{
"epoch": 2.980190755685987,
"grad_norm": 0.6814525127410889,
"learning_rate": 1.0522824243774932e-09,
"loss": 0.5399,
"step": 1354
},
{
"epoch": 2.9823917828319884,
"grad_norm": 0.6825253963470459,
"learning_rate": 8.056603547090813e-10,
"loss": 0.5273,
"step": 1355
},
{
"epoch": 2.9845928099779897,
"grad_norm": 0.6708872318267822,
"learning_rate": 5.919179474567216e-10,
"loss": 0.5495,
"step": 1356
},
{
"epoch": 2.986793837123991,
"grad_norm": 0.6687802672386169,
"learning_rate": 4.1105660840368154e-10,
"loss": 0.5332,
"step": 1357
},
{
"epoch": 2.9889948642699924,
"grad_norm": 0.6779024600982666,
"learning_rate": 2.630775270728858e-10,
"loss": 0.5119,
"step": 1358
},
{
"epoch": 2.991195891415994,
"grad_norm": 0.6520714163780212,
"learning_rate": 1.4798167672192e-10,
"loss": 0.5267,
"step": 1359
},
{
"epoch": 2.9933969185619955,
"grad_norm": 0.6646630764007568,
"learning_rate": 6.57698143352592e-11,
"loss": 0.5453,
"step": 1360
},
{
"epoch": 2.9955979457079973,
"grad_norm": 0.6896069645881653,
"learning_rate": 1.6442480619272007e-11,
"loss": 0.526,
"step": 1361
},
{
"epoch": 2.9977989728539987,
"grad_norm": 0.6493738293647766,
"learning_rate": 0.0,
"loss": 0.5299,
"step": 1362
},
{
"epoch": 2.9977989728539987,
"step": 1362,
"total_flos": 6.23071684524499e+17,
"train_loss": 0.0,
"train_runtime": 12.8391,
"train_samples_per_second": 10189.286,
"train_steps_per_second": 106.082
}
],
"logging_steps": 1,
"max_steps": 1362,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 6.23071684524499e+17,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}