diff --git a/client_states_v9_NEURIPS_DISJOINT_Memonly_LORA_llava_lr2e-5_bs1_gradacc32_iter0_25_gradnorm_scenario18_new_10000_random0_0625_seed1/0_trainer_state.json b/client_states_v9_NEURIPS_DISJOINT_Memonly_LORA_llava_lr2e-5_bs1_gradacc32_iter0_25_gradnorm_scenario18_new_10000_random0_0625_seed1/0_trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..6c827283b44ca682462de986d8091e9d6e681cfa --- /dev/null +++ b/client_states_v9_NEURIPS_DISJOINT_Memonly_LORA_llava_lr2e-5_bs1_gradacc32_iter0_25_gradnorm_scenario18_new_10000_random0_0625_seed1/0_trainer_state.json @@ -0,0 +1,8782 @@ +{ + "best_global_step": null, + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 1.0, + "eval_steps": 500, + "global_step": 2500, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0008, + "grad_norm": 7.428676605224609, + "learning_rate": 2.357535430610912e-06, + "loss": 0.8135, + "step": 2 + }, + { + "epoch": 0.0016, + "grad_norm": 4.688574314117432, + "learning_rate": 2.3755748898855234e-06, + "loss": 0.4197, + "step": 4 + }, + { + "epoch": 0.0024, + "grad_norm": 6.85806941986084, + "learning_rate": 2.3936738059587174e-06, + "loss": 0.5889, + "step": 6 + }, + { + "epoch": 0.0032, + "grad_norm": 4.939311981201172, + "learning_rate": 2.411832037691545e-06, + "loss": 1.7777, + "step": 8 + }, + { + "epoch": 0.004, + "grad_norm": 6.581257343292236, + "learning_rate": 2.430049443482434e-06, + "loss": 0.7227, + "step": 10 + }, + { + "epoch": 0.0048, + "grad_norm": 5.2789812088012695, + "learning_rate": 2.448325881268406e-06, + "loss": 0.6827, + "step": 12 + }, + { + "epoch": 0.0056, + "grad_norm": 8.367552757263184, + "learning_rate": 2.4666612085261277e-06, + "loss": 0.7583, + "step": 14 + }, + { + "epoch": 0.0064, + "grad_norm": 6.309530735015869, + "learning_rate": 2.4850552822730346e-06, + "loss": 0.5772, + "step": 16 + }, + { + "epoch": 0.0072, + "grad_norm": 11.15611457824707, + "learning_rate": 2.503507959068455e-06, + "loss": 0.5898, + "step": 18 + }, + { + "epoch": 0.008, + "grad_norm": 2.9229376316070557, + "learning_rate": 2.522019095014686e-06, + "loss": 0.1423, + "step": 20 + }, + { + "epoch": 0.0088, + "grad_norm": 13.48912525177002, + "learning_rate": 2.5405885457581814e-06, + "loss": 0.9523, + "step": 22 + }, + { + "epoch": 0.0096, + "grad_norm": 7.22756290435791, + "learning_rate": 2.5592161664906243e-06, + "loss": 0.329, + "step": 24 + }, + { + "epoch": 0.0104, + "grad_norm": 3.864868402481079, + "learning_rate": 2.5779018119501086e-06, + "loss": 0.3043, + "step": 26 + }, + { + "epoch": 0.0112, + "grad_norm": 7.621895790100098, + "learning_rate": 2.596645336422219e-06, + "loss": 0.3123, + "step": 28 + }, + { + "epoch": 0.012, + "grad_norm": 6.358210563659668, + "learning_rate": 2.615446593741161e-06, + "loss": 0.5717, + "step": 30 + }, + { + "epoch": 0.0128, + "grad_norm": 2.5341553688049316, + "learning_rate": 2.6343054372909648e-06, + "loss": 0.3356, + "step": 32 + }, + { + "epoch": 0.0136, + "grad_norm": 7.333450794219971, + "learning_rate": 2.6532217200065826e-06, + "loss": 0.4261, + "step": 34 + }, + { + "epoch": 0.0144, + "grad_norm": 9.422982215881348, + "learning_rate": 2.6721952943750396e-06, + "loss": 0.7553, + "step": 36 + }, + { + "epoch": 0.0152, + "grad_norm": 7.869431972503662, + "learning_rate": 2.691226012436604e-06, + "loss": 0.4286, + "step": 38 + }, + { + "epoch": 0.016, + "grad_norm": 5.751649379730225, + "learning_rate": 
2.7103137257858893e-06, + "loss": 0.5196, + "step": 40 + }, + { + "epoch": 0.0168, + "grad_norm": 9.487853050231934, + "learning_rate": 2.7294582855730733e-06, + "loss": 0.3418, + "step": 42 + }, + { + "epoch": 0.0176, + "grad_norm": 0.8265520334243774, + "learning_rate": 2.7486595425050566e-06, + "loss": 0.36, + "step": 44 + }, + { + "epoch": 0.0184, + "grad_norm": 6.352355003356934, + "learning_rate": 2.7679173468465813e-06, + "loss": 0.6908, + "step": 46 + }, + { + "epoch": 0.0192, + "grad_norm": 6.220467567443848, + "learning_rate": 2.7872315484213954e-06, + "loss": 0.5355, + "step": 48 + }, + { + "epoch": 0.02, + "grad_norm": 8.542949676513672, + "learning_rate": 2.8066019966134873e-06, + "loss": 0.3584, + "step": 50 + }, + { + "epoch": 0.0208, + "grad_norm": 12.532072067260742, + "learning_rate": 2.826028540368212e-06, + "loss": 0.7105, + "step": 52 + }, + { + "epoch": 0.0216, + "grad_norm": 22.506433486938477, + "learning_rate": 2.845511028193477e-06, + "loss": 1.2368, + "step": 54 + }, + { + "epoch": 0.0224, + "grad_norm": 5.032310485839844, + "learning_rate": 2.865049308160931e-06, + "loss": 0.8072, + "step": 56 + }, + { + "epoch": 0.0232, + "grad_norm": 5.905128479003906, + "learning_rate": 2.8846432279071533e-06, + "loss": 0.4286, + "step": 58 + }, + { + "epoch": 0.024, + "grad_norm": 4.69681453704834, + "learning_rate": 2.9042926346347835e-06, + "loss": 0.3218, + "step": 60 + }, + { + "epoch": 0.0248, + "grad_norm": 7.986056804656982, + "learning_rate": 2.9239973751138397e-06, + "loss": 0.2838, + "step": 62 + }, + { + "epoch": 0.0256, + "grad_norm": 7.595277309417725, + "learning_rate": 2.943757295682783e-06, + "loss": 0.8026, + "step": 64 + }, + { + "epoch": 0.0264, + "grad_norm": 8.339045524597168, + "learning_rate": 2.9635722422497983e-06, + "loss": 0.3001, + "step": 66 + }, + { + "epoch": 0.0272, + "grad_norm": 2.1323695182800293, + "learning_rate": 2.983442060293926e-06, + "loss": 0.2492, + "step": 68 + }, + { + "epoch": 0.028, + "grad_norm": 7.9373908042907715, + "learning_rate": 3.003366594866345e-06, + "loss": 0.7103, + "step": 70 + }, + { + "epoch": 0.0288, + "grad_norm": 4.134149551391602, + "learning_rate": 3.0233456905915338e-06, + "loss": 0.7687, + "step": 72 + }, + { + "epoch": 0.0296, + "grad_norm": 7.5926666259765625, + "learning_rate": 3.0433791916684885e-06, + "loss": 0.5618, + "step": 74 + }, + { + "epoch": 0.0304, + "grad_norm": 7.338794231414795, + "learning_rate": 3.0634669418719453e-06, + "loss": 0.4229, + "step": 76 + }, + { + "epoch": 0.0312, + "grad_norm": 10.961363792419434, + "learning_rate": 3.0836087845535933e-06, + "loss": 0.7771, + "step": 78 + }, + { + "epoch": 0.032, + "grad_norm": 7.643119812011719, + "learning_rate": 3.1038045626432945e-06, + "loss": 0.5454, + "step": 80 + }, + { + "epoch": 0.0328, + "grad_norm": 6.308757305145264, + "learning_rate": 3.1240541186503173e-06, + "loss": 0.337, + "step": 82 + }, + { + "epoch": 0.0336, + "grad_norm": 5.920470714569092, + "learning_rate": 3.1443572946645683e-06, + "loss": 0.6162, + "step": 84 + }, + { + "epoch": 0.0344, + "grad_norm": 4.9982733726501465, + "learning_rate": 3.164713932357776e-06, + "loss": 0.3885, + "step": 86 + }, + { + "epoch": 0.0352, + "grad_norm": 4.772004127502441, + "learning_rate": 3.1851238729848033e-06, + "loss": 0.4505, + "step": 88 + }, + { + "epoch": 0.036, + "grad_norm": 6.033514022827148, + "learning_rate": 3.205586957384834e-06, + "loss": 0.5492, + "step": 90 + }, + { + "epoch": 0.0368, + "grad_norm": 7.672890663146973, + "learning_rate": 3.2261030259826253e-06, + 
"loss": 0.5542, + "step": 92 + }, + { + "epoch": 0.0376, + "grad_norm": 4.560334205627441, + "learning_rate": 3.246671918789752e-06, + "loss": 0.4246, + "step": 94 + }, + { + "epoch": 0.0384, + "grad_norm": 8.157868385314941, + "learning_rate": 3.267293475405858e-06, + "loss": 0.5161, + "step": 96 + }, + { + "epoch": 0.0392, + "grad_norm": 9.780196189880371, + "learning_rate": 3.2879675350199004e-06, + "loss": 0.4372, + "step": 98 + }, + { + "epoch": 0.04, + "grad_norm": 20.107337951660156, + "learning_rate": 3.3086939364114113e-06, + "loss": 0.7497, + "step": 100 + }, + { + "epoch": 0.0408, + "grad_norm": 5.913912296295166, + "learning_rate": 3.329472517951747e-06, + "loss": 0.4863, + "step": 102 + }, + { + "epoch": 0.0416, + "grad_norm": 5.233573913574219, + "learning_rate": 3.350303117605369e-06, + "loss": 0.3886, + "step": 104 + }, + { + "epoch": 0.0424, + "grad_norm": 7.944777488708496, + "learning_rate": 3.3711855729310503e-06, + "loss": 0.7644, + "step": 106 + }, + { + "epoch": 0.0432, + "grad_norm": 7.0277485847473145, + "learning_rate": 3.3921197210832235e-06, + "loss": 0.7131, + "step": 108 + }, + { + "epoch": 0.044, + "grad_norm": 25.346858978271484, + "learning_rate": 3.4131053988131947e-06, + "loss": 0.8946, + "step": 110 + }, + { + "epoch": 0.0448, + "grad_norm": 4.840317249298096, + "learning_rate": 3.434142442470434e-06, + "loss": 0.3861, + "step": 112 + }, + { + "epoch": 0.0456, + "grad_norm": 5.518627643585205, + "learning_rate": 3.455230688003849e-06, + "loss": 0.6604, + "step": 114 + }, + { + "epoch": 0.0464, + "grad_norm": 6.281485557556152, + "learning_rate": 3.476369970963065e-06, + "loss": 0.648, + "step": 116 + }, + { + "epoch": 0.0472, + "grad_norm": 13.487313270568848, + "learning_rate": 3.497560126499706e-06, + "loss": 0.7007, + "step": 118 + }, + { + "epoch": 0.048, + "grad_norm": 2.665107250213623, + "learning_rate": 3.5188009893686836e-06, + "loss": 0.1257, + "step": 120 + }, + { + "epoch": 0.0488, + "grad_norm": 8.823912620544434, + "learning_rate": 3.5400923939294827e-06, + "loss": 0.6357, + "step": 122 + }, + { + "epoch": 0.0496, + "grad_norm": 3.015528678894043, + "learning_rate": 3.5614341741474667e-06, + "loss": 0.1395, + "step": 124 + }, + { + "epoch": 0.0504, + "grad_norm": 3.595741033554077, + "learning_rate": 3.5828261635951177e-06, + "loss": 0.3638, + "step": 126 + }, + { + "epoch": 0.0512, + "grad_norm": 4.580526828765869, + "learning_rate": 3.604268195453421e-06, + "loss": 0.5191, + "step": 128 + }, + { + "epoch": 0.052, + "grad_norm": 7.3755292892456055, + "learning_rate": 3.6257601025130893e-06, + "loss": 0.4993, + "step": 130 + }, + { + "epoch": 0.0528, + "grad_norm": 9.659078598022461, + "learning_rate": 3.647301717175955e-06, + "loss": 0.4007, + "step": 132 + }, + { + "epoch": 0.0536, + "grad_norm": 6.970526695251465, + "learning_rate": 3.66889287145614e-06, + "loss": 0.9278, + "step": 134 + }, + { + "epoch": 0.0544, + "grad_norm": 9.35775375366211, + "learning_rate": 3.6905333969814995e-06, + "loss": 0.7499, + "step": 136 + }, + { + "epoch": 0.0552, + "grad_norm": 8.061389923095703, + "learning_rate": 3.712223124994867e-06, + "loss": 0.3931, + "step": 138 + }, + { + "epoch": 0.056, + "grad_norm": 7.523435592651367, + "learning_rate": 3.7339618863553885e-06, + "loss": 0.3639, + "step": 140 + }, + { + "epoch": 0.0568, + "grad_norm": 5.77680778503418, + "learning_rate": 3.755749511539848e-06, + "loss": 0.5739, + "step": 142 + }, + { + "epoch": 0.0576, + "grad_norm": 8.214181900024414, + "learning_rate": 3.7775858306439404e-06, + "loss": 
0.4753, + "step": 144 + }, + { + "epoch": 0.0584, + "grad_norm": 6.254648685455322, + "learning_rate": 3.799470673383677e-06, + "loss": 0.3452, + "step": 146 + }, + { + "epoch": 0.0592, + "grad_norm": 8.179805755615234, + "learning_rate": 3.821403869096644e-06, + "loss": 0.4398, + "step": 148 + }, + { + "epoch": 0.06, + "grad_norm": 5.415241718292236, + "learning_rate": 3.8433852467434175e-06, + "loss": 0.6625, + "step": 150 + }, + { + "epoch": 0.0608, + "grad_norm": 4.284675121307373, + "learning_rate": 3.865414634908756e-06, + "loss": 0.4488, + "step": 152 + }, + { + "epoch": 0.0616, + "grad_norm": 7.8885345458984375, + "learning_rate": 3.887491861803081e-06, + "loss": 0.6684, + "step": 154 + }, + { + "epoch": 0.0624, + "grad_norm": 5.000823497772217, + "learning_rate": 3.909616755263741e-06, + "loss": 0.5359, + "step": 156 + }, + { + "epoch": 0.0632, + "grad_norm": 3.3473854064941406, + "learning_rate": 3.9317891427563725e-06, + "loss": 0.403, + "step": 158 + }, + { + "epoch": 0.064, + "grad_norm": 5.74239444732666, + "learning_rate": 3.954008851376244e-06, + "loss": 0.4741, + "step": 160 + }, + { + "epoch": 0.0648, + "grad_norm": 7.925304889678955, + "learning_rate": 3.976275707849619e-06, + "loss": 0.4604, + "step": 162 + }, + { + "epoch": 0.0656, + "grad_norm": 7.1892571449279785, + "learning_rate": 3.99858953853505e-06, + "loss": 0.547, + "step": 164 + }, + { + "epoch": 0.0664, + "grad_norm": 6.141909599304199, + "learning_rate": 4.0209501694248e-06, + "loss": 0.4658, + "step": 166 + }, + { + "epoch": 0.0672, + "grad_norm": 11.796825408935547, + "learning_rate": 4.043357426146209e-06, + "loss": 0.6537, + "step": 168 + }, + { + "epoch": 0.068, + "grad_norm": 3.107785940170288, + "learning_rate": 4.065811133962987e-06, + "loss": 0.3216, + "step": 170 + }, + { + "epoch": 0.0688, + "grad_norm": 6.113830089569092, + "learning_rate": 4.08831111777658e-06, + "loss": 0.6906, + "step": 172 + }, + { + "epoch": 0.0696, + "grad_norm": 8.406272888183594, + "learning_rate": 4.110857202127611e-06, + "loss": 0.6615, + "step": 174 + }, + { + "epoch": 0.0704, + "grad_norm": 6.46627140045166, + "learning_rate": 4.133449211197183e-06, + "loss": 0.5224, + "step": 176 + }, + { + "epoch": 0.0712, + "grad_norm": 4.773953914642334, + "learning_rate": 4.156086968808274e-06, + "loss": 0.3986, + "step": 178 + }, + { + "epoch": 0.072, + "grad_norm": 2.648827314376831, + "learning_rate": 4.178770298427114e-06, + "loss": 0.3031, + "step": 180 + }, + { + "epoch": 0.0728, + "grad_norm": 5.903095722198486, + "learning_rate": 4.201499023164515e-06, + "loss": 0.4427, + "step": 182 + }, + { + "epoch": 0.0736, + "grad_norm": 11.300853729248047, + "learning_rate": 4.224272965777315e-06, + "loss": 0.8331, + "step": 184 + }, + { + "epoch": 0.0744, + "grad_norm": 4.325449466705322, + "learning_rate": 4.247091948669764e-06, + "loss": 0.3271, + "step": 186 + }, + { + "epoch": 0.0752, + "grad_norm": 8.037109375, + "learning_rate": 4.269955793894849e-06, + "loss": 0.6188, + "step": 188 + }, + { + "epoch": 0.076, + "grad_norm": 5.573611259460449, + "learning_rate": 4.292864323155684e-06, + "loss": 0.5308, + "step": 190 + }, + { + "epoch": 0.0768, + "grad_norm": 9.673019409179688, + "learning_rate": 4.3158173578069696e-06, + "loss": 0.5004, + "step": 192 + }, + { + "epoch": 0.0776, + "grad_norm": 2.4727535247802734, + "learning_rate": 4.338814718856333e-06, + "loss": 0.3358, + "step": 194 + }, + { + "epoch": 0.0784, + "grad_norm": 5.131119728088379, + "learning_rate": 4.3618562269657285e-06, + "loss": 0.2717, + "step": 196 + }, + 
{ + "epoch": 0.0792, + "grad_norm": 18.657052993774414, + "learning_rate": 4.384941702452852e-06, + "loss": 0.8001, + "step": 198 + }, + { + "epoch": 0.08, + "grad_norm": 2.7666983604431152, + "learning_rate": 4.408070965292526e-06, + "loss": 0.4866, + "step": 200 + }, + { + "epoch": 0.0808, + "grad_norm": 3.064669609069824, + "learning_rate": 4.431243835118112e-06, + "loss": 0.3906, + "step": 202 + }, + { + "epoch": 0.0816, + "grad_norm": 12.827176094055176, + "learning_rate": 4.4544601312229185e-06, + "loss": 0.7817, + "step": 204 + }, + { + "epoch": 0.0824, + "grad_norm": 12.40218734741211, + "learning_rate": 4.477719672561602e-06, + "loss": 0.511, + "step": 206 + }, + { + "epoch": 0.0832, + "grad_norm": 7.720707416534424, + "learning_rate": 4.501022277751605e-06, + "loss": 0.396, + "step": 208 + }, + { + "epoch": 0.084, + "grad_norm": 5.669599533081055, + "learning_rate": 4.524367765074499e-06, + "loss": 0.3701, + "step": 210 + }, + { + "epoch": 0.0848, + "grad_norm": 14.275040626525879, + "learning_rate": 4.5477559524775e-06, + "loss": 0.9219, + "step": 212 + }, + { + "epoch": 0.0856, + "grad_norm": 5.309834957122803, + "learning_rate": 4.571186657574823e-06, + "loss": 0.5128, + "step": 214 + }, + { + "epoch": 0.0864, + "grad_norm": 8.355277061462402, + "learning_rate": 4.5946596976491254e-06, + "loss": 0.7039, + "step": 216 + }, + { + "epoch": 0.0872, + "grad_norm": 8.066947937011719, + "learning_rate": 4.618174889652924e-06, + "loss": 0.5883, + "step": 218 + }, + { + "epoch": 0.088, + "grad_norm": 8.851380348205566, + "learning_rate": 4.6417320502100286e-06, + "loss": 0.6141, + "step": 220 + }, + { + "epoch": 0.0888, + "grad_norm": 6.146321773529053, + "learning_rate": 4.665330995616967e-06, + "loss": 0.4213, + "step": 222 + }, + { + "epoch": 0.0896, + "grad_norm": 2.3837244510650635, + "learning_rate": 4.688971541844424e-06, + "loss": 0.372, + "step": 224 + }, + { + "epoch": 0.0904, + "grad_norm": 15.606354713439941, + "learning_rate": 4.712653504538672e-06, + "loss": 0.6691, + "step": 226 + }, + { + "epoch": 0.0912, + "grad_norm": 5.409621238708496, + "learning_rate": 4.736376699023023e-06, + "loss": 0.3749, + "step": 228 + }, + { + "epoch": 0.092, + "grad_norm": 3.3378796577453613, + "learning_rate": 4.76014094029921e-06, + "loss": 0.2424, + "step": 230 + }, + { + "epoch": 0.0928, + "grad_norm": 6.939662933349609, + "learning_rate": 4.7839460430489216e-06, + "loss": 0.2881, + "step": 232 + }, + { + "epoch": 0.0936, + "grad_norm": 11.248266220092773, + "learning_rate": 4.807791821635185e-06, + "loss": 0.6406, + "step": 234 + }, + { + "epoch": 0.0944, + "grad_norm": 14.713454246520996, + "learning_rate": 4.831678090103828e-06, + "loss": 1.1709, + "step": 236 + }, + { + "epoch": 0.0952, + "grad_norm": 3.425539016723633, + "learning_rate": 4.855604662184931e-06, + "loss": 0.4371, + "step": 238 + }, + { + "epoch": 0.096, + "grad_norm": 4.274609088897705, + "learning_rate": 4.8795713512942785e-06, + "loss": 0.2196, + "step": 240 + }, + { + "epoch": 0.0968, + "grad_norm": 5.718649387359619, + "learning_rate": 4.903577970534815e-06, + "loss": 0.3956, + "step": 242 + }, + { + "epoch": 0.0976, + "grad_norm": 5.639620780944824, + "learning_rate": 4.9276243326981e-06, + "loss": 0.4738, + "step": 244 + }, + { + "epoch": 0.0984, + "grad_norm": 6.417396545410156, + "learning_rate": 4.951710250265788e-06, + "loss": 0.7023, + "step": 246 + }, + { + "epoch": 0.0992, + "grad_norm": 5.298447608947754, + "learning_rate": 4.975835535411023e-06, + "loss": 0.5218, + "step": 248 + }, + { + "epoch": 0.1, 
+ "grad_norm": 7.021275520324707, + "learning_rate": 5.000000000000003e-06, + "loss": 0.4748, + "step": 250 + }, + { + "epoch": 0.1008, + "grad_norm": 4.228523254394531, + "learning_rate": 5.024203455593375e-06, + "loss": 0.1665, + "step": 252 + }, + { + "epoch": 0.1016, + "grad_norm": 4.622741222381592, + "learning_rate": 5.048445713447734e-06, + "loss": 0.4185, + "step": 254 + }, + { + "epoch": 0.1024, + "grad_norm": 5.1597208976745605, + "learning_rate": 5.072726584517083e-06, + "loss": 0.336, + "step": 256 + }, + { + "epoch": 0.1032, + "grad_norm": 7.466769695281982, + "learning_rate": 5.097045879454308e-06, + "loss": 0.3572, + "step": 258 + }, + { + "epoch": 0.104, + "grad_norm": 4.210982799530029, + "learning_rate": 5.1214034086126685e-06, + "loss": 0.4357, + "step": 260 + }, + { + "epoch": 0.1048, + "grad_norm": 3.1034891605377197, + "learning_rate": 5.145798982047253e-06, + "loss": 0.4363, + "step": 262 + }, + { + "epoch": 0.1056, + "grad_norm": 5.984959602355957, + "learning_rate": 5.170232409516483e-06, + "loss": 0.4626, + "step": 264 + }, + { + "epoch": 0.1064, + "grad_norm": 17.632976531982422, + "learning_rate": 5.194703500483597e-06, + "loss": 0.4285, + "step": 266 + }, + { + "epoch": 0.1072, + "grad_norm": 7.416657447814941, + "learning_rate": 5.219212064118082e-06, + "loss": 0.3839, + "step": 268 + }, + { + "epoch": 0.108, + "grad_norm": 9.656270980834961, + "learning_rate": 5.24375790929725e-06, + "loss": 0.4548, + "step": 270 + }, + { + "epoch": 0.1088, + "grad_norm": 50.1550178527832, + "learning_rate": 5.268340844607653e-06, + "loss": 4.0707, + "step": 272 + }, + { + "epoch": 0.1096, + "grad_norm": 9.233906745910645, + "learning_rate": 5.2929606783466735e-06, + "loss": 0.662, + "step": 274 + }, + { + "epoch": 0.1104, + "grad_norm": 7.082289218902588, + "learning_rate": 5.317617218523853e-06, + "loss": 0.6863, + "step": 276 + }, + { + "epoch": 0.1112, + "grad_norm": 7.848796367645264, + "learning_rate": 5.342310272862553e-06, + "loss": 0.6412, + "step": 278 + }, + { + "epoch": 0.112, + "grad_norm": 3.84726881980896, + "learning_rate": 5.367039648801377e-06, + "loss": 0.3674, + "step": 280 + }, + { + "epoch": 0.1128, + "grad_norm": 8.89835262298584, + "learning_rate": 5.391805153495684e-06, + "loss": 0.5559, + "step": 282 + }, + { + "epoch": 0.1136, + "grad_norm": 8.333953857421875, + "learning_rate": 5.416606593819109e-06, + "loss": 0.7547, + "step": 284 + }, + { + "epoch": 0.1144, + "grad_norm": 5.093605995178223, + "learning_rate": 5.441443776365005e-06, + "loss": 0.4324, + "step": 286 + }, + { + "epoch": 0.1152, + "grad_norm": 5.2162017822265625, + "learning_rate": 5.466316507448053e-06, + "loss": 0.8642, + "step": 288 + }, + { + "epoch": 0.116, + "grad_norm": 10.420114517211914, + "learning_rate": 5.49122459310568e-06, + "loss": 0.6503, + "step": 290 + }, + { + "epoch": 0.1168, + "grad_norm": 6.556491851806641, + "learning_rate": 5.516167839099662e-06, + "loss": 0.6044, + "step": 292 + }, + { + "epoch": 0.1176, + "grad_norm": 12.74498176574707, + "learning_rate": 5.5411460509175605e-06, + "loss": 0.7149, + "step": 294 + }, + { + "epoch": 0.1184, + "grad_norm": 7.758618354797363, + "learning_rate": 5.5661590337742255e-06, + "loss": 0.4331, + "step": 296 + }, + { + "epoch": 0.1192, + "grad_norm": 6.872960567474365, + "learning_rate": 5.591206592613412e-06, + "loss": 0.5118, + "step": 298 + }, + { + "epoch": 0.12, + "grad_norm": 4.9478302001953125, + "learning_rate": 5.616288532109221e-06, + "loss": 0.3771, + "step": 300 + }, + { + "epoch": 0.1208, + "grad_norm": 
4.419280529022217, + "learning_rate": 5.641404656667652e-06, + "loss": 0.3816, + "step": 302 + }, + { + "epoch": 0.1216, + "grad_norm": 4.0377936363220215, + "learning_rate": 5.666554770428136e-06, + "loss": 0.3645, + "step": 304 + }, + { + "epoch": 0.1224, + "grad_norm": 7.112875938415527, + "learning_rate": 5.6917386772650015e-06, + "loss": 0.4655, + "step": 306 + }, + { + "epoch": 0.1232, + "grad_norm": 4.4771904945373535, + "learning_rate": 5.716956180789086e-06, + "loss": 0.4779, + "step": 308 + }, + { + "epoch": 0.124, + "grad_norm": 5.968297958374023, + "learning_rate": 5.74220708434926e-06, + "loss": 0.8389, + "step": 310 + }, + { + "epoch": 0.1248, + "grad_norm": 7.576871395111084, + "learning_rate": 5.767491191033909e-06, + "loss": 0.4136, + "step": 312 + }, + { + "epoch": 0.1256, + "grad_norm": 6.465977668762207, + "learning_rate": 5.7928083036724535e-06, + "loss": 0.3366, + "step": 314 + }, + { + "epoch": 0.1264, + "grad_norm": 8.43101692199707, + "learning_rate": 5.818158224836983e-06, + "loss": 0.4671, + "step": 316 + }, + { + "epoch": 0.1272, + "grad_norm": 6.674490928649902, + "learning_rate": 5.8435407568437194e-06, + "loss": 1.8329, + "step": 318 + }, + { + "epoch": 0.128, + "grad_norm": 4.587431907653809, + "learning_rate": 5.868955701754577e-06, + "loss": 0.339, + "step": 320 + }, + { + "epoch": 0.1288, + "grad_norm": 12.540658950805664, + "learning_rate": 5.894402861378714e-06, + "loss": 0.5991, + "step": 322 + }, + { + "epoch": 0.1296, + "grad_norm": 6.9753875732421875, + "learning_rate": 5.919882037274065e-06, + "loss": 0.4035, + "step": 324 + }, + { + "epoch": 0.1304, + "grad_norm": 4.0211663246154785, + "learning_rate": 5.9453930307488985e-06, + "loss": 0.5661, + "step": 326 + }, + { + "epoch": 0.1312, + "grad_norm": 11.671126365661621, + "learning_rate": 5.970935642863362e-06, + "loss": 0.776, + "step": 328 + }, + { + "epoch": 0.132, + "grad_norm": 4.407910346984863, + "learning_rate": 5.996509674431038e-06, + "loss": 0.4211, + "step": 330 + }, + { + "epoch": 0.1328, + "grad_norm": 3.740440607070923, + "learning_rate": 6.022114926020505e-06, + "loss": 0.3472, + "step": 332 + }, + { + "epoch": 0.1336, + "grad_norm": 4.749978542327881, + "learning_rate": 6.047751197956836e-06, + "loss": 0.2956, + "step": 334 + }, + { + "epoch": 0.1344, + "grad_norm": 3.446544647216797, + "learning_rate": 6.0734182903232475e-06, + "loss": 0.6477, + "step": 336 + }, + { + "epoch": 0.1352, + "grad_norm": 8.398823738098145, + "learning_rate": 6.0991160029626e-06, + "loss": 0.6604, + "step": 338 + }, + { + "epoch": 0.136, + "grad_norm": 3.5237298011779785, + "learning_rate": 6.124844135478966e-06, + "loss": 0.4482, + "step": 340 + }, + { + "epoch": 0.1368, + "grad_norm": 5.967245578765869, + "learning_rate": 6.1506024872392e-06, + "loss": 0.4079, + "step": 342 + }, + { + "epoch": 0.1376, + "grad_norm": 9.490840911865234, + "learning_rate": 6.176390857374501e-06, + "loss": 0.8531, + "step": 344 + }, + { + "epoch": 0.1384, + "grad_norm": 3.8971786499023438, + "learning_rate": 6.202209044781979e-06, + "loss": 0.4008, + "step": 346 + }, + { + "epoch": 0.1392, + "grad_norm": 4.254534721374512, + "learning_rate": 6.228056848126223e-06, + "loss": 0.6325, + "step": 348 + }, + { + "epoch": 0.14, + "grad_norm": 5.16749906539917, + "learning_rate": 6.253934065840883e-06, + "loss": 0.3609, + "step": 350 + }, + { + "epoch": 0.1408, + "grad_norm": 7.321227073669434, + "learning_rate": 6.279840496130188e-06, + "loss": 0.3653, + "step": 352 + }, + { + "epoch": 0.1416, + "grad_norm": 7.62925386428833, + 
"learning_rate": 6.305775936970606e-06, + "loss": 0.8053, + "step": 354 + }, + { + "epoch": 0.1424, + "grad_norm": 4.928267002105713, + "learning_rate": 6.331740186112359e-06, + "loss": 0.5276, + "step": 356 + }, + { + "epoch": 0.1432, + "grad_norm": 2.6523029804229736, + "learning_rate": 6.357733041081015e-06, + "loss": 0.2917, + "step": 358 + }, + { + "epoch": 0.144, + "grad_norm": 12.000823020935059, + "learning_rate": 6.383754299179072e-06, + "loss": 0.5779, + "step": 360 + }, + { + "epoch": 0.1448, + "grad_norm": 4.4840407371521, + "learning_rate": 6.409803757487532e-06, + "loss": 0.5981, + "step": 362 + }, + { + "epoch": 0.1456, + "grad_norm": 5.198264122009277, + "learning_rate": 6.435881212867485e-06, + "loss": 0.6076, + "step": 364 + }, + { + "epoch": 0.1464, + "grad_norm": 3.4151840209960938, + "learning_rate": 6.4619864619616975e-06, + "loss": 0.4114, + "step": 366 + }, + { + "epoch": 0.1472, + "grad_norm": 6.271514415740967, + "learning_rate": 6.48811930119619e-06, + "loss": 0.4758, + "step": 368 + }, + { + "epoch": 0.148, + "grad_norm": 17.935209274291992, + "learning_rate": 6.514279526781853e-06, + "loss": 0.7199, + "step": 370 + }, + { + "epoch": 0.1488, + "grad_norm": 14.56944465637207, + "learning_rate": 6.540466934715955e-06, + "loss": 0.6454, + "step": 372 + }, + { + "epoch": 0.1496, + "grad_norm": 4.990140438079834, + "learning_rate": 6.566681320783848e-06, + "loss": 0.2431, + "step": 374 + }, + { + "epoch": 0.1504, + "grad_norm": 6.392396450042725, + "learning_rate": 6.592922480560483e-06, + "loss": 0.3967, + "step": 376 + }, + { + "epoch": 0.1512, + "grad_norm": 6.542480945587158, + "learning_rate": 6.619190209412025e-06, + "loss": 0.577, + "step": 378 + }, + { + "epoch": 0.152, + "grad_norm": 6.3435893058776855, + "learning_rate": 6.6454843024974465e-06, + "loss": 0.3547, + "step": 380 + }, + { + "epoch": 0.1528, + "grad_norm": 7.85693883895874, + "learning_rate": 6.671804554770128e-06, + "loss": 0.9327, + "step": 382 + }, + { + "epoch": 0.1536, + "grad_norm": 4.3096137046813965, + "learning_rate": 6.698150760979456e-06, + "loss": 0.6549, + "step": 384 + }, + { + "epoch": 0.1544, + "grad_norm": 5.939504146575928, + "learning_rate": 6.724522715672421e-06, + "loss": 0.518, + "step": 386 + }, + { + "epoch": 0.1552, + "grad_norm": 5.392618656158447, + "learning_rate": 6.750920213195242e-06, + "loss": 0.7825, + "step": 388 + }, + { + "epoch": 0.156, + "grad_norm": 9.22570514678955, + "learning_rate": 6.777343047694894e-06, + "loss": 0.681, + "step": 390 + }, + { + "epoch": 0.1568, + "grad_norm": 5.902850151062012, + "learning_rate": 6.803791013120824e-06, + "loss": 0.3366, + "step": 392 + }, + { + "epoch": 0.1576, + "grad_norm": 6.317704200744629, + "learning_rate": 6.8302639032264836e-06, + "loss": 0.6636, + "step": 394 + }, + { + "epoch": 0.1584, + "grad_norm": 2.739799737930298, + "learning_rate": 6.856761511570944e-06, + "loss": 0.3568, + "step": 396 + }, + { + "epoch": 0.1592, + "grad_norm": 10.551313400268555, + "learning_rate": 6.883283631520579e-06, + "loss": 0.5487, + "step": 398 + }, + { + "epoch": 0.16, + "grad_norm": 8.61369800567627, + "learning_rate": 6.909830056250522e-06, + "loss": 0.5602, + "step": 400 + }, + { + "epoch": 0.1608, + "grad_norm": 8.547295570373535, + "learning_rate": 6.936400578746436e-06, + "loss": 0.6884, + "step": 402 + }, + { + "epoch": 0.1616, + "grad_norm": 16.45806312561035, + "learning_rate": 6.96299499180605e-06, + "loss": 0.7216, + "step": 404 + }, + { + "epoch": 0.1624, + "grad_norm": 3.695611000061035, + "learning_rate": 
6.989613088040787e-06, + "loss": 0.3861, + "step": 406 + }, + { + "epoch": 0.1632, + "grad_norm": 6.287961959838867, + "learning_rate": 7.016254659877404e-06, + "loss": 0.4634, + "step": 408 + }, + { + "epoch": 0.164, + "grad_norm": 7.198334217071533, + "learning_rate": 7.042919499559539e-06, + "loss": 0.473, + "step": 410 + }, + { + "epoch": 0.1648, + "grad_norm": 8.600248336791992, + "learning_rate": 7.06960739914943e-06, + "loss": 0.6679, + "step": 412 + }, + { + "epoch": 0.1656, + "grad_norm": 3.197615146636963, + "learning_rate": 7.09631815052946e-06, + "loss": 0.3843, + "step": 414 + }, + { + "epoch": 0.1664, + "grad_norm": 3.0980639457702637, + "learning_rate": 7.123051545403873e-06, + "loss": 0.6683, + "step": 416 + }, + { + "epoch": 0.1672, + "grad_norm": 6.608954906463623, + "learning_rate": 7.1498073753002375e-06, + "loss": 0.7753, + "step": 418 + }, + { + "epoch": 0.168, + "grad_norm": 7.584798812866211, + "learning_rate": 7.1765854315712325e-06, + "loss": 0.5333, + "step": 420 + }, + { + "epoch": 0.1688, + "grad_norm": 6.7867960929870605, + "learning_rate": 7.203385505396197e-06, + "loss": 0.4801, + "step": 422 + }, + { + "epoch": 0.1696, + "grad_norm": 2.2217888832092285, + "learning_rate": 7.230207387782771e-06, + "loss": 0.2836, + "step": 424 + }, + { + "epoch": 0.1704, + "grad_norm": 3.6246209144592285, + "learning_rate": 7.257050869568527e-06, + "loss": 0.4577, + "step": 426 + }, + { + "epoch": 0.1712, + "grad_norm": 3.1784732341766357, + "learning_rate": 7.28391574142262e-06, + "loss": 0.5712, + "step": 428 + }, + { + "epoch": 0.172, + "grad_norm": 2.8604462146759033, + "learning_rate": 7.3108017938473485e-06, + "loss": 0.3046, + "step": 430 + }, + { + "epoch": 0.1728, + "grad_norm": 3.685370922088623, + "learning_rate": 7.337708817179875e-06, + "loss": 0.6305, + "step": 432 + }, + { + "epoch": 0.1736, + "grad_norm": 7.055061340332031, + "learning_rate": 7.36463660159386e-06, + "loss": 0.5632, + "step": 434 + }, + { + "epoch": 0.1744, + "grad_norm": 8.530369758605957, + "learning_rate": 7.39158493710103e-06, + "loss": 0.6991, + "step": 436 + }, + { + "epoch": 0.1752, + "grad_norm": 3.476318597793579, + "learning_rate": 7.418553613552822e-06, + "loss": 0.4436, + "step": 438 + }, + { + "epoch": 0.176, + "grad_norm": 2.979520559310913, + "learning_rate": 7.445542420642091e-06, + "loss": 0.3924, + "step": 440 + }, + { + "epoch": 0.1768, + "grad_norm": 6.099870681762695, + "learning_rate": 7.472551147904703e-06, + "loss": 0.4088, + "step": 442 + }, + { + "epoch": 0.1776, + "grad_norm": 8.147076606750488, + "learning_rate": 7.499579584721173e-06, + "loss": 0.4561, + "step": 444 + }, + { + "epoch": 0.1784, + "grad_norm": 6.481442451477051, + "learning_rate": 7.5266275203183395e-06, + "loss": 0.5383, + "step": 446 + }, + { + "epoch": 0.1792, + "grad_norm": 2.7729148864746094, + "learning_rate": 7.553694743770917e-06, + "loss": 0.3598, + "step": 448 + }, + { + "epoch": 0.18, + "grad_norm": 8.662606239318848, + "learning_rate": 7.580781044003312e-06, + "loss": 0.6492, + "step": 450 + }, + { + "epoch": 0.1808, + "grad_norm": 7.1027374267578125, + "learning_rate": 7.607886209791095e-06, + "loss": 0.8726, + "step": 452 + }, + { + "epoch": 0.1816, + "grad_norm": 3.737102508544922, + "learning_rate": 7.635010029762755e-06, + "loss": 0.4625, + "step": 454 + }, + { + "epoch": 0.1824, + "grad_norm": 7.406997203826904, + "learning_rate": 7.662152292401265e-06, + "loss": 0.6214, + "step": 456 + }, + { + "epoch": 0.1832, + "grad_norm": 6.555530548095703, + "learning_rate": 
7.689312786045822e-06, + "loss": 0.7548, + "step": 458 + }, + { + "epoch": 0.184, + "grad_norm": 3.5676968097686768, + "learning_rate": 7.716491298893441e-06, + "loss": 0.3623, + "step": 460 + }, + { + "epoch": 0.1848, + "grad_norm": 5.894664287567139, + "learning_rate": 7.74368761900062e-06, + "loss": 0.5739, + "step": 462 + }, + { + "epoch": 0.1856, + "grad_norm": 4.684706687927246, + "learning_rate": 7.770901534284991e-06, + "loss": 0.6722, + "step": 464 + }, + { + "epoch": 0.1864, + "grad_norm": 8.258028984069824, + "learning_rate": 7.798132832526976e-06, + "loss": 0.4903, + "step": 466 + }, + { + "epoch": 0.1872, + "grad_norm": 11.913901329040527, + "learning_rate": 7.825381301371444e-06, + "loss": 0.3768, + "step": 468 + }, + { + "epoch": 0.188, + "grad_norm": 2.9942965507507324, + "learning_rate": 7.852646728329358e-06, + "loss": 0.4586, + "step": 470 + }, + { + "epoch": 0.1888, + "grad_norm": 9.535568237304688, + "learning_rate": 7.879928900779441e-06, + "loss": 0.6082, + "step": 472 + }, + { + "epoch": 0.1896, + "grad_norm": 11.259536743164062, + "learning_rate": 7.907227605969852e-06, + "loss": 0.859, + "step": 474 + }, + { + "epoch": 0.1904, + "grad_norm": 10.543761253356934, + "learning_rate": 7.934542631019767e-06, + "loss": 0.4658, + "step": 476 + }, + { + "epoch": 0.1912, + "grad_norm": 9.306256294250488, + "learning_rate": 7.961873762921153e-06, + "loss": 0.9107, + "step": 478 + }, + { + "epoch": 0.192, + "grad_norm": 3.1275129318237305, + "learning_rate": 7.989220788540351e-06, + "loss": 0.4828, + "step": 480 + }, + { + "epoch": 0.1928, + "grad_norm": 10.207925796508789, + "learning_rate": 8.016583494619764e-06, + "loss": 0.3932, + "step": 482 + }, + { + "epoch": 0.1936, + "grad_norm": 3.0926074981689453, + "learning_rate": 8.043961667779511e-06, + "loss": 0.514, + "step": 484 + }, + { + "epoch": 0.1944, + "grad_norm": 6.525674343109131, + "learning_rate": 8.071355094519103e-06, + "loss": 0.3918, + "step": 486 + }, + { + "epoch": 0.1952, + "grad_norm": 2.066398859024048, + "learning_rate": 8.098763561219089e-06, + "loss": 0.3501, + "step": 488 + }, + { + "epoch": 0.196, + "grad_norm": 4.382544040679932, + "learning_rate": 8.126186854142744e-06, + "loss": 0.4171, + "step": 490 + }, + { + "epoch": 0.1968, + "grad_norm": 2.5938968658447266, + "learning_rate": 8.153624759437718e-06, + "loss": 0.3604, + "step": 492 + }, + { + "epoch": 0.1976, + "grad_norm": 5.127084255218506, + "learning_rate": 8.181077063137735e-06, + "loss": 0.4352, + "step": 494 + }, + { + "epoch": 0.1984, + "grad_norm": 2.662642478942871, + "learning_rate": 8.208543551164178e-06, + "loss": 0.4609, + "step": 496 + }, + { + "epoch": 0.1992, + "grad_norm": 6.415817737579346, + "learning_rate": 8.236024009327877e-06, + "loss": 0.6271, + "step": 498 + }, + { + "epoch": 0.2, + "grad_norm": 4.131330490112305, + "learning_rate": 8.263518223330695e-06, + "loss": 0.5425, + "step": 500 + }, + { + "epoch": 0.2008, + "grad_norm": 4.420889377593994, + "learning_rate": 8.29102597876723e-06, + "loss": 0.5649, + "step": 502 + }, + { + "epoch": 0.2016, + "grad_norm": 5.390644073486328, + "learning_rate": 8.31854706112648e-06, + "loss": 0.6567, + "step": 504 + }, + { + "epoch": 0.2024, + "grad_norm": 7.901724338531494, + "learning_rate": 8.346081255793516e-06, + "loss": 0.4477, + "step": 506 + }, + { + "epoch": 0.2032, + "grad_norm": 3.4163174629211426, + "learning_rate": 8.373628348051156e-06, + "loss": 0.4377, + "step": 508 + }, + { + "epoch": 0.204, + "grad_norm": 5.113077163696289, + "learning_rate": 
8.401188123081642e-06, + "loss": 0.3727, + "step": 510 + }, + { + "epoch": 0.2048, + "grad_norm": 2.9778358936309814, + "learning_rate": 8.428760365968329e-06, + "loss": 0.5046, + "step": 512 + }, + { + "epoch": 0.2056, + "grad_norm": 9.621271133422852, + "learning_rate": 8.456344861697293e-06, + "loss": 0.5682, + "step": 514 + }, + { + "epoch": 0.2064, + "grad_norm": 3.1639251708984375, + "learning_rate": 8.483941395159114e-06, + "loss": 0.4007, + "step": 516 + }, + { + "epoch": 0.2072, + "grad_norm": 14.570175170898438, + "learning_rate": 8.511549751150478e-06, + "loss": 0.6807, + "step": 518 + }, + { + "epoch": 0.208, + "grad_norm": 4.703779220581055, + "learning_rate": 8.539169714375883e-06, + "loss": 0.362, + "step": 520 + }, + { + "epoch": 0.2088, + "grad_norm": 8.251890182495117, + "learning_rate": 8.566801069449304e-06, + "loss": 0.669, + "step": 522 + }, + { + "epoch": 0.2096, + "grad_norm": 7.932413101196289, + "learning_rate": 8.594443600895886e-06, + "loss": 0.5107, + "step": 524 + }, + { + "epoch": 0.2104, + "grad_norm": 3.570723533630371, + "learning_rate": 8.622097093153612e-06, + "loss": 0.3977, + "step": 526 + }, + { + "epoch": 0.2112, + "grad_norm": 7.7874979972839355, + "learning_rate": 8.649761330575e-06, + "loss": 0.5925, + "step": 528 + }, + { + "epoch": 0.212, + "grad_norm": 6.07938814163208, + "learning_rate": 8.677436097428766e-06, + "loss": 0.5776, + "step": 530 + }, + { + "epoch": 0.2128, + "grad_norm": 5.536543369293213, + "learning_rate": 8.705121177901537e-06, + "loss": 0.4131, + "step": 532 + }, + { + "epoch": 0.2136, + "grad_norm": 3.3589935302734375, + "learning_rate": 8.732816356099459e-06, + "loss": 0.4839, + "step": 534 + }, + { + "epoch": 0.2144, + "grad_norm": 2.7732083797454834, + "learning_rate": 8.760521416049986e-06, + "loss": 0.2898, + "step": 536 + }, + { + "epoch": 0.2152, + "grad_norm": 3.485111713409424, + "learning_rate": 8.788236141703477e-06, + "loss": 0.2139, + "step": 538 + }, + { + "epoch": 0.216, + "grad_norm": 5.76864767074585, + "learning_rate": 8.81596031693499e-06, + "loss": 0.445, + "step": 540 + }, + { + "epoch": 0.2168, + "grad_norm": 3.4889819622039795, + "learning_rate": 8.84369372554578e-06, + "loss": 0.3836, + "step": 542 + }, + { + "epoch": 0.2176, + "grad_norm": 3.0347912311553955, + "learning_rate": 8.87143615126518e-06, + "loss": 0.2902, + "step": 544 + }, + { + "epoch": 0.2184, + "grad_norm": 8.731011390686035, + "learning_rate": 8.899187377752173e-06, + "loss": 0.4252, + "step": 546 + }, + { + "epoch": 0.2192, + "grad_norm": 2.755082130432129, + "learning_rate": 8.926947188597127e-06, + "loss": 0.3409, + "step": 548 + }, + { + "epoch": 0.22, + "grad_norm": 7.3828043937683105, + "learning_rate": 8.954715367323473e-06, + "loss": 0.416, + "step": 550 + }, + { + "epoch": 0.2208, + "grad_norm": 2.2503442764282227, + "learning_rate": 8.982491697389344e-06, + "loss": 0.655, + "step": 552 + }, + { + "epoch": 0.2216, + "grad_norm": 6.455631732940674, + "learning_rate": 9.010275962189356e-06, + "loss": 0.5366, + "step": 554 + }, + { + "epoch": 0.2224, + "grad_norm": 4.30733060836792, + "learning_rate": 9.03806794505621e-06, + "loss": 0.5306, + "step": 556 + }, + { + "epoch": 0.2232, + "grad_norm": 10.166731834411621, + "learning_rate": 9.065867429262497e-06, + "loss": 1.083, + "step": 558 + }, + { + "epoch": 0.224, + "grad_norm": 3.9657294750213623, + "learning_rate": 9.093674198022198e-06, + "loss": 0.675, + "step": 560 + }, + { + "epoch": 0.2248, + "grad_norm": 11.929780006408691, + "learning_rate": 9.121488034492567e-06, + 
"loss": 0.6316, + "step": 562 + }, + { + "epoch": 0.2256, + "grad_norm": 10.191093444824219, + "learning_rate": 9.149308721775717e-06, + "loss": 0.6091, + "step": 564 + }, + { + "epoch": 0.2264, + "grad_norm": 7.18539571762085, + "learning_rate": 9.177136042920338e-06, + "loss": 0.403, + "step": 566 + }, + { + "epoch": 0.2272, + "grad_norm": 3.656543016433716, + "learning_rate": 9.204969780923396e-06, + "loss": 0.6639, + "step": 568 + }, + { + "epoch": 0.228, + "grad_norm": 7.359060764312744, + "learning_rate": 9.232809718731822e-06, + "loss": 0.3566, + "step": 570 + }, + { + "epoch": 0.2288, + "grad_norm": 9.004820823669434, + "learning_rate": 9.26065563924414e-06, + "loss": 0.6622, + "step": 572 + }, + { + "epoch": 0.2296, + "grad_norm": 10.838543891906738, + "learning_rate": 9.288507325312319e-06, + "loss": 0.6195, + "step": 574 + }, + { + "epoch": 0.2304, + "grad_norm": 2.975687265396118, + "learning_rate": 9.316364559743298e-06, + "loss": 0.3825, + "step": 576 + }, + { + "epoch": 0.2312, + "grad_norm": 8.698026657104492, + "learning_rate": 9.344227125300788e-06, + "loss": 1.0027, + "step": 578 + }, + { + "epoch": 0.232, + "grad_norm": 5.095306396484375, + "learning_rate": 9.372094804706867e-06, + "loss": 0.4516, + "step": 580 + }, + { + "epoch": 0.2328, + "grad_norm": 11.336697578430176, + "learning_rate": 9.39996738064379e-06, + "loss": 1.0363, + "step": 582 + }, + { + "epoch": 0.2336, + "grad_norm": 9.10135269165039, + "learning_rate": 9.427844635755615e-06, + "loss": 0.437, + "step": 584 + }, + { + "epoch": 0.2344, + "grad_norm": 5.346004962921143, + "learning_rate": 9.455726352649904e-06, + "loss": 0.476, + "step": 586 + }, + { + "epoch": 0.2352, + "grad_norm": 6.9864983558654785, + "learning_rate": 9.483612313899446e-06, + "loss": 0.6293, + "step": 588 + }, + { + "epoch": 0.236, + "grad_norm": 3.372296094894409, + "learning_rate": 9.511502302043859e-06, + "loss": 0.4772, + "step": 590 + }, + { + "epoch": 0.2368, + "grad_norm": 6.616199493408203, + "learning_rate": 9.539396099591469e-06, + "loss": 0.6176, + "step": 592 + }, + { + "epoch": 0.2376, + "grad_norm": 10.571331977844238, + "learning_rate": 9.567293489020816e-06, + "loss": 0.6476, + "step": 594 + }, + { + "epoch": 0.2384, + "grad_norm": 6.520980358123779, + "learning_rate": 9.595194252782461e-06, + "loss": 0.4728, + "step": 596 + }, + { + "epoch": 0.2392, + "grad_norm": 5.69945764541626, + "learning_rate": 9.623098173300656e-06, + "loss": 0.3048, + "step": 598 + }, + { + "epoch": 0.24, + "grad_norm": 2.8320627212524414, + "learning_rate": 9.651005032974991e-06, + "loss": 0.3585, + "step": 600 + }, + { + "epoch": 0.2408, + "grad_norm": 5.430441379547119, + "learning_rate": 9.678914614182184e-06, + "loss": 0.362, + "step": 602 + }, + { + "epoch": 0.2416, + "grad_norm": 6.453746795654297, + "learning_rate": 9.706826699277714e-06, + "loss": 0.5644, + "step": 604 + }, + { + "epoch": 0.2424, + "grad_norm": 5.222368240356445, + "learning_rate": 9.734741070597535e-06, + "loss": 0.4847, + "step": 606 + }, + { + "epoch": 0.2432, + "grad_norm": 8.284361839294434, + "learning_rate": 9.762657510459774e-06, + "loss": 0.3067, + "step": 608 + }, + { + "epoch": 0.244, + "grad_norm": 2.8420135974884033, + "learning_rate": 9.790575801166422e-06, + "loss": 0.4242, + "step": 610 + }, + { + "epoch": 0.2448, + "grad_norm": 3.083292245864868, + "learning_rate": 9.818495725005043e-06, + "loss": 1.6922, + "step": 612 + }, + { + "epoch": 0.2456, + "grad_norm": 7.784399509429932, + "learning_rate": 9.846417064250459e-06, + "loss": 0.4656, + "step": 
614 + }, + { + "epoch": 0.2464, + "grad_norm": 2.823225736618042, + "learning_rate": 9.874339601166479e-06, + "loss": 0.6133, + "step": 616 + }, + { + "epoch": 0.2472, + "grad_norm": 2.745265483856201, + "learning_rate": 9.902263118007513e-06, + "loss": 0.4453, + "step": 618 + }, + { + "epoch": 0.248, + "grad_norm": 6.4653191566467285, + "learning_rate": 9.930187397020385e-06, + "loss": 0.4293, + "step": 620 + }, + { + "epoch": 0.2488, + "grad_norm": 7.129982948303223, + "learning_rate": 9.95811222044596e-06, + "loss": 0.662, + "step": 622 + }, + { + "epoch": 0.2496, + "grad_norm": 3.3546640872955322, + "learning_rate": 9.986037370520855e-06, + "loss": 0.4132, + "step": 624 + }, + { + "epoch": 0.2504, + "grad_norm": 6.033866882324219, + "learning_rate": 1.0013962629479139e-05, + "loss": 0.4106, + "step": 626 + }, + { + "epoch": 0.2512, + "grad_norm": 2.7721498012542725, + "learning_rate": 1.0041887779554034e-05, + "loss": 0.435, + "step": 628 + }, + { + "epoch": 0.252, + "grad_norm": 4.638468265533447, + "learning_rate": 1.0069812602979607e-05, + "loss": 0.3248, + "step": 630 + }, + { + "epoch": 0.2528, + "grad_norm": 4.487112522125244, + "learning_rate": 1.0097736881992482e-05, + "loss": 0.2748, + "step": 632 + }, + { + "epoch": 0.2536, + "grad_norm": 3.1372745037078857, + "learning_rate": 1.0125660398833514e-05, + "loss": 0.5276, + "step": 634 + }, + { + "epoch": 0.2544, + "grad_norm": 3.534034013748169, + "learning_rate": 1.0153582935749533e-05, + "loss": 0.3561, + "step": 636 + }, + { + "epoch": 0.2552, + "grad_norm": 3.8285937309265137, + "learning_rate": 1.0181504274994952e-05, + "loss": 0.2581, + "step": 638 + }, + { + "epoch": 0.256, + "grad_norm": 8.048152923583984, + "learning_rate": 1.0209424198833571e-05, + "loss": 0.6331, + "step": 640 + }, + { + "epoch": 0.2568, + "grad_norm": 14.114180564880371, + "learning_rate": 1.0237342489540218e-05, + "loss": 0.5025, + "step": 642 + }, + { + "epoch": 0.2576, + "grad_norm": 2.971632480621338, + "learning_rate": 1.0265258929402458e-05, + "loss": 0.3162, + "step": 644 + }, + { + "epoch": 0.2584, + "grad_norm": 3.681770086288452, + "learning_rate": 1.029317330072228e-05, + "loss": 0.555, + "step": 646 + }, + { + "epoch": 0.2592, + "grad_norm": 6.693310260772705, + "learning_rate": 1.0321085385817811e-05, + "loss": 0.4238, + "step": 648 + }, + { + "epoch": 0.26, + "grad_norm": 7.489689350128174, + "learning_rate": 1.0348994967025004e-05, + "loss": 0.5693, + "step": 650 + }, + { + "epoch": 0.2608, + "grad_norm": 10.314934730529785, + "learning_rate": 1.0376901826699337e-05, + "loss": 0.444, + "step": 652 + }, + { + "epoch": 0.2616, + "grad_norm": 8.000083923339844, + "learning_rate": 1.0404805747217532e-05, + "loss": 1.1624, + "step": 654 + }, + { + "epoch": 0.2624, + "grad_norm": 3.0513830184936523, + "learning_rate": 1.0432706510979175e-05, + "loss": 0.3671, + "step": 656 + }, + { + "epoch": 0.2632, + "grad_norm": 5.537996768951416, + "learning_rate": 1.0460603900408526e-05, + "loss": 0.3633, + "step": 658 + }, + { + "epoch": 0.264, + "grad_norm": 3.6096031665802, + "learning_rate": 1.0488497697956134e-05, + "loss": 0.3869, + "step": 660 + }, + { + "epoch": 0.2648, + "grad_norm": 3.872437000274658, + "learning_rate": 1.0516387686100549e-05, + "loss": 0.2569, + "step": 662 + }, + { + "epoch": 0.2656, + "grad_norm": 19.229639053344727, + "learning_rate": 1.054427364735009e-05, + "loss": 1.5103, + "step": 664 + }, + { + "epoch": 0.2664, + "grad_norm": 4.366555213928223, + "learning_rate": 1.0572155364244378e-05, + "loss": 0.4739, + "step": 666 
+ }, + { + "epoch": 0.2672, + "grad_norm": 12.398948669433594, + "learning_rate": 1.0600032619356203e-05, + "loss": 0.7889, + "step": 668 + }, + { + "epoch": 0.268, + "grad_norm": 14.852575302124023, + "learning_rate": 1.0627905195293127e-05, + "loss": 0.9629, + "step": 670 + }, + { + "epoch": 0.2688, + "grad_norm": 10.531519889831543, + "learning_rate": 1.0655772874699206e-05, + "loss": 0.8866, + "step": 672 + }, + { + "epoch": 0.2696, + "grad_norm": 9.607449531555176, + "learning_rate": 1.0683635440256694e-05, + "loss": 0.8125, + "step": 674 + }, + { + "epoch": 0.2704, + "grad_norm": 4.8872785568237305, + "learning_rate": 1.0711492674687674e-05, + "loss": 0.3938, + "step": 676 + }, + { + "epoch": 0.2712, + "grad_norm": 11.146080017089844, + "learning_rate": 1.0739344360755855e-05, + "loss": 0.8325, + "step": 678 + }, + { + "epoch": 0.272, + "grad_norm": 4.164681434631348, + "learning_rate": 1.0767190281268171e-05, + "loss": 0.4736, + "step": 680 + }, + { + "epoch": 0.2728, + "grad_norm": 7.81128454208374, + "learning_rate": 1.07950302190766e-05, + "loss": 0.3557, + "step": 682 + }, + { + "epoch": 0.2736, + "grad_norm": 4.940060138702393, + "learning_rate": 1.0822863957079654e-05, + "loss": 0.2446, + "step": 684 + }, + { + "epoch": 0.2744, + "grad_norm": 3.599069118499756, + "learning_rate": 1.0850691278224277e-05, + "loss": 0.3247, + "step": 686 + }, + { + "epoch": 0.2752, + "grad_norm": 2.723473072052002, + "learning_rate": 1.0878511965507428e-05, + "loss": 0.4377, + "step": 688 + }, + { + "epoch": 0.276, + "grad_norm": 9.285993576049805, + "learning_rate": 1.0906325801977795e-05, + "loss": 0.5683, + "step": 690 + }, + { + "epoch": 0.2768, + "grad_norm": 6.284435272216797, + "learning_rate": 1.0934132570737497e-05, + "loss": 0.4374, + "step": 692 + }, + { + "epoch": 0.2776, + "grad_norm": 3.0146565437316895, + "learning_rate": 1.0961932054943785e-05, + "loss": 0.36, + "step": 694 + }, + { + "epoch": 0.2784, + "grad_norm": 8.176236152648926, + "learning_rate": 1.098972403781064e-05, + "loss": 0.7033, + "step": 696 + }, + { + "epoch": 0.2792, + "grad_norm": 7.648057460784912, + "learning_rate": 1.101750830261065e-05, + "loss": 0.4907, + "step": 698 + }, + { + "epoch": 0.28, + "grad_norm": 6.528674602508545, + "learning_rate": 1.104528463267652e-05, + "loss": 0.489, + "step": 700 + }, + { + "epoch": 0.2808, + "grad_norm": 5.457291603088379, + "learning_rate": 1.1073052811402867e-05, + "loss": 0.8648, + "step": 702 + }, + { + "epoch": 0.2816, + "grad_norm": 7.5247344970703125, + "learning_rate": 1.1100812622247821e-05, + "loss": 0.3886, + "step": 704 + }, + { + "epoch": 0.2824, + "grad_norm": 9.832022666931152, + "learning_rate": 1.1128563848734815e-05, + "loss": 0.7117, + "step": 706 + }, + { + "epoch": 0.2832, + "grad_norm": 12.296405792236328, + "learning_rate": 1.1156306274454211e-05, + "loss": 0.6653, + "step": 708 + }, + { + "epoch": 0.284, + "grad_norm": 3.188476324081421, + "learning_rate": 1.1184039683065002e-05, + "loss": 0.4074, + "step": 710 + }, + { + "epoch": 0.2848, + "grad_norm": 8.455747604370117, + "learning_rate": 1.1211763858296516e-05, + "loss": 0.803, + "step": 712 + }, + { + "epoch": 0.2856, + "grad_norm": 2.5128846168518066, + "learning_rate": 1.1239478583950007e-05, + "loss": 0.6004, + "step": 714 + }, + { + "epoch": 0.2864, + "grad_norm": 6.20869255065918, + "learning_rate": 1.1267183643900534e-05, + "loss": 0.2269, + "step": 716 + }, + { + "epoch": 0.2872, + "grad_norm": 3.149703025817871, + "learning_rate": 1.1294878822098456e-05, + "loss": 0.3468, + "step": 718 
+ }, + { + "epoch": 0.288, + "grad_norm": 3.046736717224121, + "learning_rate": 1.1322563902571227e-05, + "loss": 0.435, + "step": 720 + }, + { + "epoch": 0.2888, + "grad_norm": 3.9733293056488037, + "learning_rate": 1.1350238669424993e-05, + "loss": 0.336, + "step": 722 + }, + { + "epoch": 0.2896, + "grad_norm": 5.300455093383789, + "learning_rate": 1.137790290684638e-05, + "loss": 0.4059, + "step": 724 + }, + { + "epoch": 0.2904, + "grad_norm": 6.307999134063721, + "learning_rate": 1.1405556399104108e-05, + "loss": 0.5065, + "step": 726 + }, + { + "epoch": 0.2912, + "grad_norm": 7.641139030456543, + "learning_rate": 1.143319893055069e-05, + "loss": 0.8458, + "step": 728 + }, + { + "epoch": 0.292, + "grad_norm": 6.329631328582764, + "learning_rate": 1.1460830285624112e-05, + "loss": 0.3936, + "step": 730 + }, + { + "epoch": 0.2928, + "grad_norm": 3.9163966178894043, + "learning_rate": 1.1488450248849515e-05, + "loss": 0.2481, + "step": 732 + }, + { + "epoch": 0.2936, + "grad_norm": 4.952850341796875, + "learning_rate": 1.1516058604840881e-05, + "loss": 0.5636, + "step": 734 + }, + { + "epoch": 0.2944, + "grad_norm": 4.797139644622803, + "learning_rate": 1.15436551383027e-05, + "loss": 0.3305, + "step": 736 + }, + { + "epoch": 0.2952, + "grad_norm": 5.792111873626709, + "learning_rate": 1.1571239634031666e-05, + "loss": 0.3829, + "step": 738 + }, + { + "epoch": 0.296, + "grad_norm": 2.61521053314209, + "learning_rate": 1.1598811876918352e-05, + "loss": 0.4863, + "step": 740 + }, + { + "epoch": 0.2968, + "grad_norm": 4.18222188949585, + "learning_rate": 1.1626371651948839e-05, + "loss": 0.6332, + "step": 742 + }, + { + "epoch": 0.2976, + "grad_norm": 4.946098327636719, + "learning_rate": 1.1653918744206478e-05, + "loss": 0.4458, + "step": 744 + }, + { + "epoch": 0.2984, + "grad_norm": 5.32734489440918, + "learning_rate": 1.1681452938873515e-05, + "loss": 0.3399, + "step": 746 + }, + { + "epoch": 0.2992, + "grad_norm": 4.0147600173950195, + "learning_rate": 1.1708974021232763e-05, + "loss": 0.4765, + "step": 748 + }, + { + "epoch": 0.3, + "grad_norm": 5.177061080932617, + "learning_rate": 1.1736481776669297e-05, + "loss": 0.3631, + "step": 750 + }, + { + "epoch": 0.3008, + "grad_norm": 5.71055793762207, + "learning_rate": 1.1763975990672116e-05, + "loss": 0.3434, + "step": 752 + }, + { + "epoch": 0.3016, + "grad_norm": 2.9367713928222656, + "learning_rate": 1.1791456448835815e-05, + "loss": 0.338, + "step": 754 + }, + { + "epoch": 0.3024, + "grad_norm": 5.17765474319458, + "learning_rate": 1.1818922936862258e-05, + "loss": 0.2791, + "step": 756 + }, + { + "epoch": 0.3032, + "grad_norm": 11.932939529418945, + "learning_rate": 1.1846375240562274e-05, + "loss": 1.1732, + "step": 758 + }, + { + "epoch": 0.304, + "grad_norm": 3.952331304550171, + "learning_rate": 1.187381314585725e-05, + "loss": 0.3083, + "step": 760 + }, + { + "epoch": 0.3048, + "grad_norm": 2.7587730884552, + "learning_rate": 1.1901236438780906e-05, + "loss": 0.3518, + "step": 762 + }, + { + "epoch": 0.3056, + "grad_norm": 1.007660984992981, + "learning_rate": 1.192864490548089e-05, + "loss": 0.3737, + "step": 764 + }, + { + "epoch": 0.3064, + "grad_norm": 2.9483938217163086, + "learning_rate": 1.195603833222048e-05, + "loss": 0.2933, + "step": 766 + }, + { + "epoch": 0.3072, + "grad_norm": 2.8194210529327393, + "learning_rate": 1.198341650538023e-05, + "loss": 0.5113, + "step": 768 + }, + { + "epoch": 0.308, + "grad_norm": 6.031118869781494, + "learning_rate": 1.2010779211459642e-05, + "loss": 0.8163, + "step": 770 + }, + { + 
"epoch": 0.3088, + "grad_norm": 3.5361063480377197, + "learning_rate": 1.203812623707884e-05, + "loss": 0.7252, + "step": 772 + }, + { + "epoch": 0.3096, + "grad_norm": 3.8909547328948975, + "learning_rate": 1.2065457368980227e-05, + "loss": 0.4168, + "step": 774 + }, + { + "epoch": 0.3104, + "grad_norm": 3.7777271270751953, + "learning_rate": 1.2092772394030141e-05, + "loss": 0.9927, + "step": 776 + }, + { + "epoch": 0.3112, + "grad_norm": 4.374517440795898, + "learning_rate": 1.2120071099220552e-05, + "loss": 0.5356, + "step": 778 + }, + { + "epoch": 0.312, + "grad_norm": 7.112154483795166, + "learning_rate": 1.2147353271670637e-05, + "loss": 0.4746, + "step": 780 + }, + { + "epoch": 0.3128, + "grad_norm": 4.3697967529296875, + "learning_rate": 1.217461869862855e-05, + "loss": 0.5526, + "step": 782 + }, + { + "epoch": 0.3136, + "grad_norm": 8.930327415466309, + "learning_rate": 1.2201867167473015e-05, + "loss": 0.377, + "step": 784 + }, + { + "epoch": 0.3144, + "grad_norm": 3.697787046432495, + "learning_rate": 1.2229098465715002e-05, + "loss": 0.5277, + "step": 786 + }, + { + "epoch": 0.3152, + "grad_norm": 6.874600887298584, + "learning_rate": 1.2256312380999373e-05, + "loss": 0.7206, + "step": 788 + }, + { + "epoch": 0.316, + "grad_norm": 3.165311813354492, + "learning_rate": 1.2283508701106552e-05, + "loss": 0.4289, + "step": 790 + }, + { + "epoch": 0.3168, + "grad_norm": 8.956424713134766, + "learning_rate": 1.2310687213954173e-05, + "loss": 0.5692, + "step": 792 + }, + { + "epoch": 0.3176, + "grad_norm": 3.313727855682373, + "learning_rate": 1.233784770759873e-05, + "loss": 0.4057, + "step": 794 + }, + { + "epoch": 0.3184, + "grad_norm": 7.44467830657959, + "learning_rate": 1.2364989970237238e-05, + "loss": 0.425, + "step": 796 + }, + { + "epoch": 0.3192, + "grad_norm": 2.7032885551452637, + "learning_rate": 1.23921137902089e-05, + "loss": 1.1685, + "step": 798 + }, + { + "epoch": 0.32, + "grad_norm": 21.789257049560547, + "learning_rate": 1.241921895599668e-05, + "loss": 2.9202, + "step": 800 + }, + { + "epoch": 0.3208, + "grad_norm": 8.086333274841309, + "learning_rate": 1.2446305256229076e-05, + "loss": 0.4955, + "step": 802 + }, + { + "epoch": 0.3216, + "grad_norm": 3.5499794483184814, + "learning_rate": 1.2473372479681653e-05, + "loss": 0.5145, + "step": 804 + }, + { + "epoch": 0.3224, + "grad_norm": 7.596743583679199, + "learning_rate": 1.2500420415278822e-05, + "loss": 0.5452, + "step": 806 + }, + { + "epoch": 0.3232, + "grad_norm": 6.792609214782715, + "learning_rate": 1.2527448852095292e-05, + "loss": 0.4943, + "step": 808 + }, + { + "epoch": 0.324, + "grad_norm": 6.553757667541504, + "learning_rate": 1.2554457579357902e-05, + "loss": 0.6194, + "step": 810 + }, + { + "epoch": 0.3248, + "grad_norm": 7.149604797363281, + "learning_rate": 1.2581446386447171e-05, + "loss": 0.5983, + "step": 812 + }, + { + "epoch": 0.3256, + "grad_norm": 3.606539726257324, + "learning_rate": 1.2608415062898963e-05, + "loss": 0.5224, + "step": 814 + }, + { + "epoch": 0.3264, + "grad_norm": 2.280977725982666, + "learning_rate": 1.2635363398406133e-05, + "loss": 0.623, + "step": 816 + }, + { + "epoch": 0.3272, + "grad_norm": 10.369894981384277, + "learning_rate": 1.266229118282012e-05, + "loss": 0.8874, + "step": 818 + }, + { + "epoch": 0.328, + "grad_norm": 4.448359489440918, + "learning_rate": 1.2689198206152644e-05, + "loss": 0.352, + "step": 820 + }, + { + "epoch": 0.3288, + "grad_norm": 2.4175426959991455, + "learning_rate": 1.2716084258577373e-05, + "loss": 0.4758, + "step": 822 + }, + { + 
"epoch": 0.3296, + "grad_norm": 5.202447891235352, + "learning_rate": 1.2742949130431468e-05, + "loss": 0.3762, + "step": 824 + }, + { + "epoch": 0.3304, + "grad_norm": 10.678263664245605, + "learning_rate": 1.2769792612217224e-05, + "loss": 0.7578, + "step": 826 + }, + { + "epoch": 0.3312, + "grad_norm": 10.530842781066895, + "learning_rate": 1.2796614494603795e-05, + "loss": 0.5892, + "step": 828 + }, + { + "epoch": 0.332, + "grad_norm": 2.2692458629608154, + "learning_rate": 1.282341456842876e-05, + "loss": 0.4299, + "step": 830 + }, + { + "epoch": 0.3328, + "grad_norm": 29.20014762878418, + "learning_rate": 1.2850192624699756e-05, + "loss": 2.1729, + "step": 832 + }, + { + "epoch": 0.3336, + "grad_norm": 4.95890998840332, + "learning_rate": 1.2876948454596122e-05, + "loss": 0.7695, + "step": 834 + }, + { + "epoch": 0.3344, + "grad_norm": 4.845802307128906, + "learning_rate": 1.2903681849470535e-05, + "loss": 0.5292, + "step": 836 + }, + { + "epoch": 0.3352, + "grad_norm": 8.932195663452148, + "learning_rate": 1.2930392600850565e-05, + "loss": 0.7145, + "step": 838 + }, + { + "epoch": 0.336, + "grad_norm": 5.170701503753662, + "learning_rate": 1.2957080500440455e-05, + "loss": 0.6364, + "step": 840 + }, + { + "epoch": 0.3368, + "grad_norm": 10.944951057434082, + "learning_rate": 1.2983745340122589e-05, + "loss": 0.6809, + "step": 842 + }, + { + "epoch": 0.3376, + "grad_norm": 15.961918830871582, + "learning_rate": 1.3010386911959205e-05, + "loss": 0.6377, + "step": 844 + }, + { + "epoch": 0.3384, + "grad_norm": 7.693042755126953, + "learning_rate": 1.3037005008193944e-05, + "loss": 0.5823, + "step": 846 + }, + { + "epoch": 0.3392, + "grad_norm": 10.104445457458496, + "learning_rate": 1.3063599421253556e-05, + "loss": 0.4535, + "step": 848 + }, + { + "epoch": 0.34, + "grad_norm": 9.348386764526367, + "learning_rate": 1.309016994374947e-05, + "loss": 0.5694, + "step": 850 + }, + { + "epoch": 0.3408, + "grad_norm": 1.5034079551696777, + "learning_rate": 1.3116716368479415e-05, + "loss": 0.3579, + "step": 852 + }, + { + "epoch": 0.3416, + "grad_norm": 6.123833179473877, + "learning_rate": 1.3143238488429049e-05, + "loss": 0.5469, + "step": 854 + }, + { + "epoch": 0.3424, + "grad_norm": 2.4656760692596436, + "learning_rate": 1.316973609677351e-05, + "loss": 0.3558, + "step": 856 + }, + { + "epoch": 0.3432, + "grad_norm": 5.257678985595703, + "learning_rate": 1.319620898687917e-05, + "loss": 0.5961, + "step": 858 + }, + { + "epoch": 0.344, + "grad_norm": 2.2889106273651123, + "learning_rate": 1.32226569523051e-05, + "loss": 0.4341, + "step": 860 + }, + { + "epoch": 0.3448, + "grad_norm": 6.256611347198486, + "learning_rate": 1.324907978680475e-05, + "loss": 0.4521, + "step": 862 + }, + { + "epoch": 0.3456, + "grad_norm": 4.55164909362793, + "learning_rate": 1.3275477284327572e-05, + "loss": 0.4435, + "step": 864 + }, + { + "epoch": 0.3464, + "grad_norm": 3.1022279262542725, + "learning_rate": 1.3301849239020537e-05, + "loss": 0.6874, + "step": 866 + }, + { + "epoch": 0.3472, + "grad_norm": 2.245877981185913, + "learning_rate": 1.3328195445229865e-05, + "loss": 0.5371, + "step": 868 + }, + { + "epoch": 0.348, + "grad_norm": 7.837813377380371, + "learning_rate": 1.3354515697502548e-05, + "loss": 0.5843, + "step": 870 + }, + { + "epoch": 0.3488, + "grad_norm": 6.17982816696167, + "learning_rate": 1.338080979058797e-05, + "loss": 0.5286, + "step": 872 + }, + { + "epoch": 0.3496, + "grad_norm": 11.103405952453613, + "learning_rate": 1.340707751943951e-05, + "loss": 0.6753, + "step": 874 + }, + { + 
"epoch": 0.3504, + "grad_norm": 14.157888412475586, + "learning_rate": 1.3433318679216145e-05, + "loss": 0.5989, + "step": 876 + }, + { + "epoch": 0.3512, + "grad_norm": 4.318014621734619, + "learning_rate": 1.3459533065284039e-05, + "loss": 0.3676, + "step": 878 + }, + { + "epoch": 0.352, + "grad_norm": 6.474916934967041, + "learning_rate": 1.348572047321814e-05, + "loss": 0.5159, + "step": 880 + }, + { + "epoch": 0.3528, + "grad_norm": 5.441918849945068, + "learning_rate": 1.3511880698803803e-05, + "loss": 0.8862, + "step": 882 + }, + { + "epoch": 0.3536, + "grad_norm": 6.940057277679443, + "learning_rate": 1.3538013538038296e-05, + "loss": 0.6308, + "step": 884 + }, + { + "epoch": 0.3544, + "grad_norm": 3.7186665534973145, + "learning_rate": 1.3564118787132507e-05, + "loss": 0.3685, + "step": 886 + }, + { + "epoch": 0.3552, + "grad_norm": 19.710111618041992, + "learning_rate": 1.3590196242512461e-05, + "loss": 1.2655, + "step": 888 + }, + { + "epoch": 0.356, + "grad_norm": 2.533984899520874, + "learning_rate": 1.361624570082092e-05, + "loss": 0.3519, + "step": 890 + }, + { + "epoch": 0.3568, + "grad_norm": 11.8716402053833, + "learning_rate": 1.364226695891898e-05, + "loss": 0.7045, + "step": 892 + }, + { + "epoch": 0.3576, + "grad_norm": 2.3505406379699707, + "learning_rate": 1.3668259813887637e-05, + "loss": 0.492, + "step": 894 + }, + { + "epoch": 0.3584, + "grad_norm": 4.525683403015137, + "learning_rate": 1.3694224063029386e-05, + "loss": 0.475, + "step": 896 + }, + { + "epoch": 0.3592, + "grad_norm": 7.988845348358154, + "learning_rate": 1.3720159503869806e-05, + "loss": 0.3732, + "step": 898 + }, + { + "epoch": 0.36, + "grad_norm": 4.825455665588379, + "learning_rate": 1.374606593415911e-05, + "loss": 0.4227, + "step": 900 + }, + { + "epoch": 0.3608, + "grad_norm": 2.447044610977173, + "learning_rate": 1.377194315187377e-05, + "loss": 0.5101, + "step": 902 + }, + { + "epoch": 0.3616, + "grad_norm": 0.4026242792606354, + "learning_rate": 1.3797790955218014e-05, + "loss": 0.3221, + "step": 904 + }, + { + "epoch": 0.3624, + "grad_norm": 7.215357780456543, + "learning_rate": 1.3823609142625492e-05, + "loss": 0.8086, + "step": 906 + }, + { + "epoch": 0.3632, + "grad_norm": 3.070342779159546, + "learning_rate": 1.3849397512760793e-05, + "loss": 0.5274, + "step": 908 + }, + { + "epoch": 0.364, + "grad_norm": 4.354795455932617, + "learning_rate": 1.3875155864521027e-05, + "loss": 0.4501, + "step": 910 + }, + { + "epoch": 0.3648, + "grad_norm": 4.162818908691406, + "learning_rate": 1.3900883997037393e-05, + "loss": 0.2185, + "step": 912 + }, + { + "epoch": 0.3656, + "grad_norm": 1.5700749158859253, + "learning_rate": 1.3926581709676746e-05, + "loss": 0.288, + "step": 914 + }, + { + "epoch": 0.3664, + "grad_norm": 4.362253189086914, + "learning_rate": 1.3952248802043158e-05, + "loss": 0.3545, + "step": 916 + }, + { + "epoch": 0.3672, + "grad_norm": 5.418710231781006, + "learning_rate": 1.397788507397949e-05, + "loss": 0.4328, + "step": 918 + }, + { + "epoch": 0.368, + "grad_norm": 6.52181339263916, + "learning_rate": 1.4003490325568956e-05, + "loss": 0.3501, + "step": 920 + }, + { + "epoch": 0.3688, + "grad_norm": 4.86818265914917, + "learning_rate": 1.4029064357136632e-05, + "loss": 0.4529, + "step": 922 + }, + { + "epoch": 0.3696, + "grad_norm": 6.881949424743652, + "learning_rate": 1.4054606969251096e-05, + "loss": 0.5725, + "step": 924 + }, + { + "epoch": 0.3704, + "grad_norm": 3.368731737136841, + "learning_rate": 1.4080117962725929e-05, + "loss": 0.4468, + "step": 926 + }, + { + 
"epoch": 0.3712, + "grad_norm": 3.010711908340454, + "learning_rate": 1.4105597138621281e-05, + "loss": 0.4431, + "step": 928 + }, + { + "epoch": 0.372, + "grad_norm": 8.769145011901855, + "learning_rate": 1.4131044298245416e-05, + "loss": 0.5087, + "step": 930 + }, + { + "epoch": 0.3728, + "grad_norm": 7.3292975425720215, + "learning_rate": 1.4156459243156275e-05, + "loss": 0.4583, + "step": 932 + }, + { + "epoch": 0.3736, + "grad_norm": 2.95186448097229, + "learning_rate": 1.418184177516301e-05, + "loss": 0.4141, + "step": 934 + }, + { + "epoch": 0.3744, + "grad_norm": 3.9545629024505615, + "learning_rate": 1.420719169632754e-05, + "loss": 0.3136, + "step": 936 + }, + { + "epoch": 0.3752, + "grad_norm": 8.807525634765625, + "learning_rate": 1.4232508808966085e-05, + "loss": 0.792, + "step": 938 + }, + { + "epoch": 0.376, + "grad_norm": 3.8343663215637207, + "learning_rate": 1.4257792915650735e-05, + "loss": 0.4076, + "step": 940 + }, + { + "epoch": 0.3768, + "grad_norm": 3.995673179626465, + "learning_rate": 1.4283043819210906e-05, + "loss": 0.5437, + "step": 942 + }, + { + "epoch": 0.3776, + "grad_norm": 5.492018222808838, + "learning_rate": 1.430826132273499e-05, + "loss": 0.4751, + "step": 944 + }, + { + "epoch": 0.3784, + "grad_norm": 6.859824180603027, + "learning_rate": 1.4333445229571857e-05, + "loss": 0.6557, + "step": 946 + }, + { + "epoch": 0.3792, + "grad_norm": 8.421869277954102, + "learning_rate": 1.4358595343332342e-05, + "loss": 0.7003, + "step": 948 + }, + { + "epoch": 0.38, + "grad_norm": 3.7867302894592285, + "learning_rate": 1.4383711467890772e-05, + "loss": 0.2215, + "step": 950 + }, + { + "epoch": 0.3808, + "grad_norm": 8.494775772094727, + "learning_rate": 1.4408793407386584e-05, + "loss": 0.5345, + "step": 952 + }, + { + "epoch": 0.3816, + "grad_norm": 3.8272619247436523, + "learning_rate": 1.4433840966225767e-05, + "loss": 0.5162, + "step": 954 + }, + { + "epoch": 0.3824, + "grad_norm": 6.869154930114746, + "learning_rate": 1.4458853949082434e-05, + "loss": 0.7501, + "step": 956 + }, + { + "epoch": 0.3832, + "grad_norm": 3.7528223991394043, + "learning_rate": 1.4483832160900332e-05, + "loss": 0.2814, + "step": 958 + }, + { + "epoch": 0.384, + "grad_norm": 12.88850212097168, + "learning_rate": 1.4508775406894315e-05, + "loss": 0.6589, + "step": 960 + }, + { + "epoch": 0.3848, + "grad_norm": 5.3597588539123535, + "learning_rate": 1.4533683492551942e-05, + "loss": 0.1747, + "step": 962 + }, + { + "epoch": 0.3856, + "grad_norm": 4.822731971740723, + "learning_rate": 1.4558556223634988e-05, + "loss": 0.3057, + "step": 964 + }, + { + "epoch": 0.3864, + "grad_norm": 6.9705891609191895, + "learning_rate": 1.4583393406180886e-05, + "loss": 0.4439, + "step": 966 + }, + { + "epoch": 0.3872, + "grad_norm": 7.96857213973999, + "learning_rate": 1.460819484650431e-05, + "loss": 0.586, + "step": 968 + }, + { + "epoch": 0.388, + "grad_norm": 5.491257667541504, + "learning_rate": 1.4632960351198618e-05, + "loss": 0.5213, + "step": 970 + }, + { + "epoch": 0.3888, + "grad_norm": 7.354158878326416, + "learning_rate": 1.4657689727137441e-05, + "loss": 0.4731, + "step": 972 + }, + { + "epoch": 0.3896, + "grad_norm": 8.442399978637695, + "learning_rate": 1.468238278147614e-05, + "loss": 0.5359, + "step": 974 + }, + { + "epoch": 0.3904, + "grad_norm": 1.9848921298980713, + "learning_rate": 1.470703932165332e-05, + "loss": 0.341, + "step": 976 + }, + { + "epoch": 0.3912, + "grad_norm": 2.058201551437378, + "learning_rate": 1.4731659155392339e-05, + "loss": 0.3913, + "step": 978 + }, + { + 
"epoch": 0.392, + "grad_norm": 8.525607109069824, + "learning_rate": 1.4756242090702744e-05, + "loss": 0.5909, + "step": 980 + }, + { + "epoch": 0.3928, + "grad_norm": 2.9162418842315674, + "learning_rate": 1.4780787935881913e-05, + "loss": 0.3393, + "step": 982 + }, + { + "epoch": 0.3936, + "grad_norm": 2.995479106903076, + "learning_rate": 1.4805296499516397e-05, + "loss": 0.5719, + "step": 984 + }, + { + "epoch": 0.3944, + "grad_norm": 8.575796127319336, + "learning_rate": 1.482976759048351e-05, + "loss": 0.619, + "step": 986 + }, + { + "epoch": 0.3952, + "grad_norm": 2.8806512355804443, + "learning_rate": 1.485420101795274e-05, + "loss": 0.3831, + "step": 988 + }, + { + "epoch": 0.396, + "grad_norm": 6.711476802825928, + "learning_rate": 1.4878596591387327e-05, + "loss": 0.4079, + "step": 990 + }, + { + "epoch": 0.3968, + "grad_norm": 3.724522590637207, + "learning_rate": 1.4902954120545686e-05, + "loss": 0.7505, + "step": 992 + }, + { + "epoch": 0.3976, + "grad_norm": 3.5688815116882324, + "learning_rate": 1.4927273415482913e-05, + "loss": 0.2601, + "step": 994 + }, + { + "epoch": 0.3984, + "grad_norm": 5.003245830535889, + "learning_rate": 1.4951554286552261e-05, + "loss": 0.3731, + "step": 996 + }, + { + "epoch": 0.3992, + "grad_norm": 8.481842041015625, + "learning_rate": 1.4975796544406617e-05, + "loss": 0.7391, + "step": 998 + }, + { + "epoch": 0.4, + "grad_norm": 4.910562038421631, + "learning_rate": 1.4999999999999992e-05, + "loss": 0.4004, + "step": 1000 + }, + { + "epoch": 0.4008, + "grad_norm": 12.834502220153809, + "learning_rate": 1.502416446458897e-05, + "loss": 1.0613, + "step": 1002 + }, + { + "epoch": 0.4016, + "grad_norm": 6.666214942932129, + "learning_rate": 1.5048289749734206e-05, + "loss": 0.594, + "step": 1004 + }, + { + "epoch": 0.4024, + "grad_norm": 2.4326231479644775, + "learning_rate": 1.5072375667301895e-05, + "loss": 0.3435, + "step": 1006 + }, + { + "epoch": 0.4032, + "grad_norm": 2.314138650894165, + "learning_rate": 1.5096422029465178e-05, + "loss": 0.35, + "step": 1008 + }, + { + "epoch": 0.404, + "grad_norm": 2.2951853275299072, + "learning_rate": 1.5120428648705714e-05, + "loss": 0.2415, + "step": 1010 + }, + { + "epoch": 0.4048, + "grad_norm": 1.675195574760437, + "learning_rate": 1.5144395337815064e-05, + "loss": 0.2919, + "step": 1012 + }, + { + "epoch": 0.4056, + "grad_norm": 9.622923851013184, + "learning_rate": 1.5168321909896166e-05, + "loss": 0.558, + "step": 1014 + }, + { + "epoch": 0.4064, + "grad_norm": 9.771827697753906, + "learning_rate": 1.5192208178364808e-05, + "loss": 0.667, + "step": 1016 + }, + { + "epoch": 0.4072, + "grad_norm": 3.1868371963500977, + "learning_rate": 1.521605395695107e-05, + "loss": 0.3962, + "step": 1018 + }, + { + "epoch": 0.408, + "grad_norm": 3.322626829147339, + "learning_rate": 1.5239859059700784e-05, + "loss": 0.3846, + "step": 1020 + }, + { + "epoch": 0.4088, + "grad_norm": 3.9479050636291504, + "learning_rate": 1.526362330097697e-05, + "loss": 0.3751, + "step": 1022 + }, + { + "epoch": 0.4096, + "grad_norm": 2.961951971054077, + "learning_rate": 1.5287346495461322e-05, + "loss": 0.452, + "step": 1024 + }, + { + "epoch": 0.4104, + "grad_norm": 2.779170274734497, + "learning_rate": 1.531102845815557e-05, + "loss": 0.3655, + "step": 1026 + }, + { + "epoch": 0.4112, + "grad_norm": 9.825654983520508, + "learning_rate": 1.5334669004383025e-05, + "loss": 0.3564, + "step": 1028 + }, + { + "epoch": 0.412, + "grad_norm": 6.186550617218018, + "learning_rate": 1.5358267949789968e-05, + "loss": 0.3361, + "step": 1030 
+ }, + { + "epoch": 0.4128, + "grad_norm": 8.893967628479004, + "learning_rate": 1.5381825110347072e-05, + "loss": 0.5267, + "step": 1032 + }, + { + "epoch": 0.4136, + "grad_norm": 4.747354507446289, + "learning_rate": 1.540534030235087e-05, + "loss": 0.1701, + "step": 1034 + }, + { + "epoch": 0.4144, + "grad_norm": 7.46083927154541, + "learning_rate": 1.542881334242517e-05, + "loss": 0.488, + "step": 1036 + }, + { + "epoch": 0.4152, + "grad_norm": 5.576011657714844, + "learning_rate": 1.5452244047522493e-05, + "loss": 0.5621, + "step": 1038 + }, + { + "epoch": 0.416, + "grad_norm": 3.370360851287842, + "learning_rate": 1.5475632234925495e-05, + "loss": 0.6699, + "step": 1040 + }, + { + "epoch": 0.4168, + "grad_norm": 5.393393039703369, + "learning_rate": 1.5498977722248388e-05, + "loss": 0.487, + "step": 1042 + }, + { + "epoch": 0.4176, + "grad_norm": 3.2120022773742676, + "learning_rate": 1.552228032743839e-05, + "loss": 0.6391, + "step": 1044 + }, + { + "epoch": 0.4184, + "grad_norm": 3.1691336631774902, + "learning_rate": 1.5545539868777075e-05, + "loss": 0.4345, + "step": 1046 + }, + { + "epoch": 0.4192, + "grad_norm": 2.4558982849121094, + "learning_rate": 1.556875616488188e-05, + "loss": 0.5859, + "step": 1048 + }, + { + "epoch": 0.42, + "grad_norm": 4.1330342292785645, + "learning_rate": 1.5591929034707468e-05, + "loss": 0.3224, + "step": 1050 + }, + { + "epoch": 0.4208, + "grad_norm": 2.89819073677063, + "learning_rate": 1.5615058297547144e-05, + "loss": 0.4887, + "step": 1052 + }, + { + "epoch": 0.4216, + "grad_norm": 4.832616806030273, + "learning_rate": 1.5638143773034268e-05, + "loss": 0.4998, + "step": 1054 + }, + { + "epoch": 0.4224, + "grad_norm": 2.52421236038208, + "learning_rate": 1.5661185281143663e-05, + "loss": 0.367, + "step": 1056 + }, + { + "epoch": 0.4232, + "grad_norm": 2.5860087871551514, + "learning_rate": 1.5684182642193024e-05, + "loss": 0.2247, + "step": 1058 + }, + { + "epoch": 0.424, + "grad_norm": 20.680755615234375, + "learning_rate": 1.5707135676844312e-05, + "loss": 1.0634, + "step": 1060 + }, + { + "epoch": 0.4248, + "grad_norm": 2.4126100540161133, + "learning_rate": 1.5730044206105146e-05, + "loss": 0.2989, + "step": 1062 + }, + { + "epoch": 0.4256, + "grad_norm": 6.855231761932373, + "learning_rate": 1.5752908051330232e-05, + "loss": 0.5926, + "step": 1064 + }, + { + "epoch": 0.4264, + "grad_norm": 11.962879180908203, + "learning_rate": 1.577572703422268e-05, + "loss": 0.5489, + "step": 1066 + }, + { + "epoch": 0.4272, + "grad_norm": 5.586971759796143, + "learning_rate": 1.579850097683548e-05, + "loss": 0.5642, + "step": 1068 + }, + { + "epoch": 0.428, + "grad_norm": 9.240392684936523, + "learning_rate": 1.582122970157288e-05, + "loss": 0.5487, + "step": 1070 + }, + { + "epoch": 0.4288, + "grad_norm": 2.5346271991729736, + "learning_rate": 1.5843913031191722e-05, + "loss": 0.2458, + "step": 1072 + }, + { + "epoch": 0.4296, + "grad_norm": 3.1922338008880615, + "learning_rate": 1.586655078880281e-05, + "loss": 0.3974, + "step": 1074 + }, + { + "epoch": 0.4304, + "grad_norm": 7.212031364440918, + "learning_rate": 1.5889142797872383e-05, + "loss": 0.4685, + "step": 1076 + }, + { + "epoch": 0.4312, + "grad_norm": 2.082321882247925, + "learning_rate": 1.5911688882223415e-05, + "loss": 0.2191, + "step": 1078 + }, + { + "epoch": 0.432, + "grad_norm": 5.929754257202148, + "learning_rate": 1.5934188866037007e-05, + "loss": 0.5255, + "step": 1080 + }, + { + "epoch": 0.4328, + "grad_norm": 4.870511531829834, + "learning_rate": 1.5956642573853787e-05, + "loss": 
0.5902, + "step": 1082 + }, + { + "epoch": 0.4336, + "grad_norm": 7.386419296264648, + "learning_rate": 1.5979049830575193e-05, + "loss": 0.346, + "step": 1084 + }, + { + "epoch": 0.4344, + "grad_norm": 3.978145122528076, + "learning_rate": 1.6001410461464945e-05, + "loss": 0.6408, + "step": 1086 + }, + { + "epoch": 0.4352, + "grad_norm": 2.4360148906707764, + "learning_rate": 1.6023724292150377e-05, + "loss": 0.643, + "step": 1088 + }, + { + "epoch": 0.436, + "grad_norm": 2.7286767959594727, + "learning_rate": 1.604599114862375e-05, + "loss": 0.3048, + "step": 1090 + }, + { + "epoch": 0.4368, + "grad_norm": 2.107506036758423, + "learning_rate": 1.606821085724362e-05, + "loss": 0.2779, + "step": 1092 + }, + { + "epoch": 0.4376, + "grad_norm": 8.017492294311523, + "learning_rate": 1.6090383244736253e-05, + "loss": 0.453, + "step": 1094 + }, + { + "epoch": 0.4384, + "grad_norm": 8.654541015625, + "learning_rate": 1.6112508138196912e-05, + "loss": 0.4822, + "step": 1096 + }, + { + "epoch": 0.4392, + "grad_norm": 4.79012393951416, + "learning_rate": 1.613458536509124e-05, + "loss": 0.4431, + "step": 1098 + }, + { + "epoch": 0.44, + "grad_norm": 4.138698101043701, + "learning_rate": 1.615661475325658e-05, + "loss": 0.2924, + "step": 1100 + }, + { + "epoch": 0.4408, + "grad_norm": 7.6140522956848145, + "learning_rate": 1.6178596130903352e-05, + "loss": 0.5564, + "step": 1102 + }, + { + "epoch": 0.4416, + "grad_norm": 5.842023849487305, + "learning_rate": 1.620052932661632e-05, + "loss": 0.2565, + "step": 1104 + }, + { + "epoch": 0.4424, + "grad_norm": 3.1433494091033936, + "learning_rate": 1.6222414169356056e-05, + "loss": 0.5082, + "step": 1106 + }, + { + "epoch": 0.4432, + "grad_norm": 2.6837446689605713, + "learning_rate": 1.6244250488460146e-05, + "loss": 0.4007, + "step": 1108 + }, + { + "epoch": 0.444, + "grad_norm": 3.3919453620910645, + "learning_rate": 1.6266038113644605e-05, + "loss": 0.4995, + "step": 1110 + }, + { + "epoch": 0.4448, + "grad_norm": 2.611570358276367, + "learning_rate": 1.6287776875005127e-05, + "loss": 0.3224, + "step": 1112 + }, + { + "epoch": 0.4456, + "grad_norm": 4.856591701507568, + "learning_rate": 1.6309466603018497e-05, + "loss": 0.4728, + "step": 1114 + }, + { + "epoch": 0.4464, + "grad_norm": 13.449359893798828, + "learning_rate": 1.6331107128543856e-05, + "loss": 0.5741, + "step": 1116 + }, + { + "epoch": 0.4472, + "grad_norm": 2.368253707885742, + "learning_rate": 1.635269828282404e-05, + "loss": 0.561, + "step": 1118 + }, + { + "epoch": 0.448, + "grad_norm": 2.5609967708587646, + "learning_rate": 1.6374239897486905e-05, + "loss": 0.4176, + "step": 1120 + }, + { + "epoch": 0.4488, + "grad_norm": 3.7215967178344727, + "learning_rate": 1.6395731804546575e-05, + "loss": 0.3077, + "step": 1122 + }, + { + "epoch": 0.4496, + "grad_norm": 7.401405334472656, + "learning_rate": 1.6417173836404878e-05, + "loss": 0.3221, + "step": 1124 + }, + { + "epoch": 0.4504, + "grad_norm": 2.0310912132263184, + "learning_rate": 1.643856582585253e-05, + "loss": 0.3205, + "step": 1126 + }, + { + "epoch": 0.4512, + "grad_norm": 9.05074405670166, + "learning_rate": 1.6459907606070513e-05, + "loss": 0.3382, + "step": 1128 + }, + { + "epoch": 0.452, + "grad_norm": 11.440814018249512, + "learning_rate": 1.6481199010631312e-05, + "loss": 0.4869, + "step": 1130 + }, + { + "epoch": 0.4528, + "grad_norm": 2.9795913696289062, + "learning_rate": 1.650243987350029e-05, + "loss": 0.6705, + "step": 1132 + }, + { + "epoch": 0.4536, + "grad_norm": 4.947592258453369, + "learning_rate": 
1.652363002903693e-05, + "loss": 0.2737, + "step": 1134 + }, + { + "epoch": 0.4544, + "grad_norm": 6.063323974609375, + "learning_rate": 1.6544769311996146e-05, + "loss": 0.3493, + "step": 1136 + }, + { + "epoch": 0.4552, + "grad_norm": 8.340121269226074, + "learning_rate": 1.656585755752956e-05, + "loss": 0.6582, + "step": 1138 + }, + { + "epoch": 0.456, + "grad_norm": 11.626544952392578, + "learning_rate": 1.65868946011868e-05, + "loss": 0.6541, + "step": 1140 + }, + { + "epoch": 0.4568, + "grad_norm": 4.603865623474121, + "learning_rate": 1.660788027891677e-05, + "loss": 0.6873, + "step": 1142 + }, + { + "epoch": 0.4576, + "grad_norm": 7.3429155349731445, + "learning_rate": 1.6628814427068944e-05, + "loss": 0.6526, + "step": 1144 + }, + { + "epoch": 0.4584, + "grad_norm": 3.518334150314331, + "learning_rate": 1.6649696882394625e-05, + "loss": 0.3181, + "step": 1146 + }, + { + "epoch": 0.4592, + "grad_norm": 5.010440349578857, + "learning_rate": 1.667052748204825e-05, + "loss": 0.4269, + "step": 1148 + }, + { + "epoch": 0.46, + "grad_norm": 4.749119758605957, + "learning_rate": 1.6691306063588583e-05, + "loss": 0.3674, + "step": 1150 + }, + { + "epoch": 0.4608, + "grad_norm": 7.220366954803467, + "learning_rate": 1.6712032464980094e-05, + "loss": 0.2894, + "step": 1152 + }, + { + "epoch": 0.4616, + "grad_norm": 2.52457857131958, + "learning_rate": 1.6732706524594138e-05, + "loss": 0.472, + "step": 1154 + }, + { + "epoch": 0.4624, + "grad_norm": 1.485608458518982, + "learning_rate": 1.6753328081210244e-05, + "loss": 0.1831, + "step": 1156 + }, + { + "epoch": 0.4632, + "grad_norm": 4.405038356781006, + "learning_rate": 1.6773896974017373e-05, + "loss": 0.5844, + "step": 1158 + }, + { + "epoch": 0.464, + "grad_norm": 8.147605895996094, + "learning_rate": 1.679441304261516e-05, + "loss": 0.48, + "step": 1160 + }, + { + "epoch": 0.4648, + "grad_norm": 4.937817573547363, + "learning_rate": 1.681487612701519e-05, + "loss": 0.5161, + "step": 1162 + }, + { + "epoch": 0.4656, + "grad_norm": 5.370089530944824, + "learning_rate": 1.683528606764222e-05, + "loss": 0.5149, + "step": 1164 + }, + { + "epoch": 0.4664, + "grad_norm": 8.323260307312012, + "learning_rate": 1.6855642705335428e-05, + "loss": 0.8392, + "step": 1166 + }, + { + "epoch": 0.4672, + "grad_norm": 2.0059239864349365, + "learning_rate": 1.687594588134968e-05, + "loss": 0.3356, + "step": 1168 + }, + { + "epoch": 0.468, + "grad_norm": 4.255488872528076, + "learning_rate": 1.68961954373567e-05, + "loss": 0.4183, + "step": 1170 + }, + { + "epoch": 0.4688, + "grad_norm": 7.48425817489624, + "learning_rate": 1.6916391215446403e-05, + "loss": 0.5642, + "step": 1172 + }, + { + "epoch": 0.4696, + "grad_norm": 6.062568664550781, + "learning_rate": 1.693653305812805e-05, + "loss": 0.4021, + "step": 1174 + }, + { + "epoch": 0.4704, + "grad_norm": 6.629954814910889, + "learning_rate": 1.6956620808331505e-05, + "loss": 2.9714, + "step": 1176 + }, + { + "epoch": 0.4712, + "grad_norm": 4.56139612197876, + "learning_rate": 1.697665430940846e-05, + "loss": 0.4547, + "step": 1178 + }, + { + "epoch": 0.472, + "grad_norm": 6.164916038513184, + "learning_rate": 1.699663340513365e-05, + "loss": 0.7382, + "step": 1180 + }, + { + "epoch": 0.4728, + "grad_norm": 4.28443717956543, + "learning_rate": 1.7016557939706068e-05, + "loss": 0.5405, + "step": 1182 + }, + { + "epoch": 0.4736, + "grad_norm": 9.564210891723633, + "learning_rate": 1.7036427757750198e-05, + "loss": 0.641, + "step": 1184 + }, + { + "epoch": 0.4744, + "grad_norm": 4.702108860015869, + 
"learning_rate": 1.7056242704317212e-05, + "loss": 0.4587, + "step": 1186 + }, + { + "epoch": 0.4752, + "grad_norm": 6.514930248260498, + "learning_rate": 1.7076002624886156e-05, + "loss": 0.4304, + "step": 1188 + }, + { + "epoch": 0.476, + "grad_norm": 7.816126823425293, + "learning_rate": 1.709570736536521e-05, + "loss": 0.5538, + "step": 1190 + }, + { + "epoch": 0.4768, + "grad_norm": 4.538508415222168, + "learning_rate": 1.7115356772092844e-05, + "loss": 0.3398, + "step": 1192 + }, + { + "epoch": 0.4776, + "grad_norm": 7.519460678100586, + "learning_rate": 1.7134950691839063e-05, + "loss": 0.5039, + "step": 1194 + }, + { + "epoch": 0.4784, + "grad_norm": 3.8620688915252686, + "learning_rate": 1.7154488971806518e-05, + "loss": 0.4347, + "step": 1196 + }, + { + "epoch": 0.4792, + "grad_norm": 2.2250876426696777, + "learning_rate": 1.7173971459631783e-05, + "loss": 0.4299, + "step": 1198 + }, + { + "epoch": 0.48, + "grad_norm": 1.6023005247116089, + "learning_rate": 1.7193398003386507e-05, + "loss": 0.3742, + "step": 1200 + }, + { + "epoch": 0.4808, + "grad_norm": 8.703466415405273, + "learning_rate": 1.7212768451578602e-05, + "loss": 0.5665, + "step": 1202 + }, + { + "epoch": 0.4816, + "grad_norm": 2.355034351348877, + "learning_rate": 1.7232082653153416e-05, + "loss": 0.3028, + "step": 1204 + }, + { + "epoch": 0.4824, + "grad_norm": 2.3913300037384033, + "learning_rate": 1.7251340457494937e-05, + "loss": 0.2936, + "step": 1206 + }, + { + "epoch": 0.4832, + "grad_norm": 2.8259389400482178, + "learning_rate": 1.7270541714426923e-05, + "loss": 0.2899, + "step": 1208 + }, + { + "epoch": 0.484, + "grad_norm": 11.542742729187012, + "learning_rate": 1.7289686274214106e-05, + "loss": 1.6536, + "step": 1210 + }, + { + "epoch": 0.4848, + "grad_norm": 8.168238639831543, + "learning_rate": 1.7308773987563393e-05, + "loss": 0.7052, + "step": 1212 + }, + { + "epoch": 0.4856, + "grad_norm": 6.536013603210449, + "learning_rate": 1.732780470562496e-05, + "loss": 0.7356, + "step": 1214 + }, + { + "epoch": 0.4864, + "grad_norm": 6.662909984588623, + "learning_rate": 1.7346778279993413e-05, + "loss": 0.7118, + "step": 1216 + }, + { + "epoch": 0.4872, + "grad_norm": 5.744428634643555, + "learning_rate": 1.736569456270903e-05, + "loss": 0.4607, + "step": 1218 + }, + { + "epoch": 0.488, + "grad_norm": 3.2977943420410156, + "learning_rate": 1.7384553406258836e-05, + "loss": 0.6452, + "step": 1220 + }, + { + "epoch": 0.4888, + "grad_norm": 7.264194011688232, + "learning_rate": 1.740335466357778e-05, + "loss": 0.4696, + "step": 1222 + }, + { + "epoch": 0.4896, + "grad_norm": 9.938541412353516, + "learning_rate": 1.7422098188049888e-05, + "loss": 0.6597, + "step": 1224 + }, + { + "epoch": 0.4904, + "grad_norm": 4.108887195587158, + "learning_rate": 1.7440783833509373e-05, + "loss": 0.2282, + "step": 1226 + }, + { + "epoch": 0.4912, + "grad_norm": 3.81720232963562, + "learning_rate": 1.7459411454241816e-05, + "loss": 0.3304, + "step": 1228 + }, + { + "epoch": 0.492, + "grad_norm": 8.632997512817383, + "learning_rate": 1.747798090498531e-05, + "loss": 0.4577, + "step": 1230 + }, + { + "epoch": 0.4928, + "grad_norm": 4.479432106018066, + "learning_rate": 1.749649204093154e-05, + "loss": 0.5919, + "step": 1232 + }, + { + "epoch": 0.4936, + "grad_norm": 7.987699031829834, + "learning_rate": 1.7514944717726962e-05, + "loss": 1.0532, + "step": 1234 + }, + { + "epoch": 0.4944, + "grad_norm": 3.5861763954162598, + "learning_rate": 1.753333879147387e-05, + "loss": 0.3997, + "step": 1236 + }, + { + "epoch": 0.4952, + 
"grad_norm": 8.19918155670166, + "learning_rate": 1.755167411873159e-05, + "loss": 0.5248, + "step": 1238 + }, + { + "epoch": 0.496, + "grad_norm": 0.6573111414909363, + "learning_rate": 1.7569950556517563e-05, + "loss": 0.1645, + "step": 1240 + }, + { + "epoch": 0.4968, + "grad_norm": 3.7899794578552246, + "learning_rate": 1.758816796230845e-05, + "loss": 0.6108, + "step": 1242 + }, + { + "epoch": 0.4976, + "grad_norm": 6.53424596786499, + "learning_rate": 1.7606326194041278e-05, + "loss": 0.5721, + "step": 1244 + }, + { + "epoch": 0.4984, + "grad_norm": 6.82229471206665, + "learning_rate": 1.762442511011447e-05, + "loss": 0.6002, + "step": 1246 + }, + { + "epoch": 0.4992, + "grad_norm": 10.31006908416748, + "learning_rate": 1.7642464569389083e-05, + "loss": 0.7883, + "step": 1248 + }, + { + "epoch": 0.5, + "grad_norm": 4.102840900421143, + "learning_rate": 1.766044443118977e-05, + "loss": 0.3058, + "step": 1250 + }, + { + "epoch": 0.5008, + "grad_norm": 13.063787460327148, + "learning_rate": 1.767836455530598e-05, + "loss": 0.7589, + "step": 1252 + }, + { + "epoch": 0.5016, + "grad_norm": 12.737040519714355, + "learning_rate": 1.7696224801992947e-05, + "loss": 0.879, + "step": 1254 + }, + { + "epoch": 0.5024, + "grad_norm": 5.0701751708984375, + "learning_rate": 1.77140250319729e-05, + "loss": 0.6462, + "step": 1256 + }, + { + "epoch": 0.5032, + "grad_norm": 6.491806507110596, + "learning_rate": 1.7731765106436073e-05, + "loss": 0.4422, + "step": 1258 + }, + { + "epoch": 0.504, + "grad_norm": 7.690762042999268, + "learning_rate": 1.7749444887041793e-05, + "loss": 0.6886, + "step": 1260 + }, + { + "epoch": 0.5048, + "grad_norm": 2.4480361938476562, + "learning_rate": 1.776706423591959e-05, + "loss": 0.3819, + "step": 1262 + }, + { + "epoch": 0.5056, + "grad_norm": 6.522246837615967, + "learning_rate": 1.778462301567023e-05, + "loss": 1.1178, + "step": 1264 + }, + { + "epoch": 0.5064, + "grad_norm": 7.040022373199463, + "learning_rate": 1.7802121089366832e-05, + "loss": 0.6745, + "step": 1266 + }, + { + "epoch": 0.5072, + "grad_norm": 5.24992036819458, + "learning_rate": 1.7819558320555895e-05, + "loss": 0.4028, + "step": 1268 + }, + { + "epoch": 0.508, + "grad_norm": 2.2948031425476074, + "learning_rate": 1.7836934573258392e-05, + "loss": 0.3027, + "step": 1270 + }, + { + "epoch": 0.5088, + "grad_norm": 2.4775025844573975, + "learning_rate": 1.785424971197082e-05, + "loss": 0.3507, + "step": 1272 + }, + { + "epoch": 0.5096, + "grad_norm": 3.7970142364501953, + "learning_rate": 1.7871503601666233e-05, + "loss": 0.591, + "step": 1274 + }, + { + "epoch": 0.5104, + "grad_norm": 8.241109848022461, + "learning_rate": 1.7888696107795343e-05, + "loss": 0.6565, + "step": 1276 + }, + { + "epoch": 0.5112, + "grad_norm": 8.135817527770996, + "learning_rate": 1.790582709628753e-05, + "loss": 0.6159, + "step": 1278 + }, + { + "epoch": 0.512, + "grad_norm": 4.600522518157959, + "learning_rate": 1.7922896433551903e-05, + "loss": 0.3696, + "step": 1280 + }, + { + "epoch": 0.5128, + "grad_norm": 4.542240142822266, + "learning_rate": 1.793990398647835e-05, + "loss": 0.4028, + "step": 1282 + }, + { + "epoch": 0.5136, + "grad_norm": 2.846844434738159, + "learning_rate": 1.795684962243855e-05, + "loss": 0.3901, + "step": 1284 + }, + { + "epoch": 0.5144, + "grad_norm": 2.5168137550354004, + "learning_rate": 1.7973733209287032e-05, + "loss": 0.3155, + "step": 1286 + }, + { + "epoch": 0.5152, + "grad_norm": 7.11170768737793, + "learning_rate": 1.7990554615362193e-05, + "loss": 0.4178, + "step": 1288 + }, + { + 
"epoch": 0.516, + "grad_norm": 2.3307881355285645, + "learning_rate": 1.800731370948734e-05, + "loss": 0.4979, + "step": 1290 + }, + { + "epoch": 0.5168, + "grad_norm": 14.980961799621582, + "learning_rate": 1.802401036097167e-05, + "loss": 0.6443, + "step": 1292 + }, + { + "epoch": 0.5176, + "grad_norm": 2.6857290267944336, + "learning_rate": 1.804064443961135e-05, + "loss": 0.4083, + "step": 1294 + }, + { + "epoch": 0.5184, + "grad_norm": 5.365575790405273, + "learning_rate": 1.8057215815690494e-05, + "loss": 0.3637, + "step": 1296 + }, + { + "epoch": 0.5192, + "grad_norm": 2.6065447330474854, + "learning_rate": 1.8073724359982184e-05, + "loss": 0.1999, + "step": 1298 + }, + { + "epoch": 0.52, + "grad_norm": 4.7862114906311035, + "learning_rate": 1.809016994374947e-05, + "loss": 0.2711, + "step": 1300 + }, + { + "epoch": 0.5208, + "grad_norm": 3.6769278049468994, + "learning_rate": 1.81065524387464e-05, + "loss": 0.3093, + "step": 1302 + }, + { + "epoch": 0.5216, + "grad_norm": 20.27410888671875, + "learning_rate": 1.8122871717218968e-05, + "loss": 0.5956, + "step": 1304 + }, + { + "epoch": 0.5224, + "grad_norm": 15.000691413879395, + "learning_rate": 1.8139127651906176e-05, + "loss": 0.7266, + "step": 1306 + }, + { + "epoch": 0.5232, + "grad_norm": 8.50636100769043, + "learning_rate": 1.8155320116040976e-05, + "loss": 1.346, + "step": 1308 + }, + { + "epoch": 0.524, + "grad_norm": 2.9468233585357666, + "learning_rate": 1.817144898335129e-05, + "loss": 0.2572, + "step": 1310 + }, + { + "epoch": 0.5248, + "grad_norm": 12.458033561706543, + "learning_rate": 1.818751412806095e-05, + "loss": 0.5091, + "step": 1312 + }, + { + "epoch": 0.5256, + "grad_norm": 4.08944034576416, + "learning_rate": 1.8203515424890738e-05, + "loss": 0.3258, + "step": 1314 + }, + { + "epoch": 0.5264, + "grad_norm": 6.3507819175720215, + "learning_rate": 1.8219452749059322e-05, + "loss": 0.6816, + "step": 1316 + }, + { + "epoch": 0.5272, + "grad_norm": 3.709740161895752, + "learning_rate": 1.8235325976284276e-05, + "loss": 0.3486, + "step": 1318 + }, + { + "epoch": 0.528, + "grad_norm": 2.184805154800415, + "learning_rate": 1.8251134982782952e-05, + "loss": 0.2956, + "step": 1320 + }, + { + "epoch": 0.5288, + "grad_norm": 3.21848726272583, + "learning_rate": 1.826687964527355e-05, + "loss": 0.4955, + "step": 1322 + }, + { + "epoch": 0.5296, + "grad_norm": 11.279875755310059, + "learning_rate": 1.828255984097604e-05, + "loss": 0.5082, + "step": 1324 + }, + { + "epoch": 0.5304, + "grad_norm": 4.378537178039551, + "learning_rate": 1.8298175447613093e-05, + "loss": 0.3311, + "step": 1326 + }, + { + "epoch": 0.5312, + "grad_norm": 5.776856422424316, + "learning_rate": 1.8313726343411092e-05, + "loss": 0.3442, + "step": 1328 + }, + { + "epoch": 0.532, + "grad_norm": 2.2048215866088867, + "learning_rate": 1.8329212407101e-05, + "loss": 0.3682, + "step": 1330 + }, + { + "epoch": 0.5328, + "grad_norm": 2.4243111610412598, + "learning_rate": 1.8344633517919394e-05, + "loss": 0.4139, + "step": 1332 + }, + { + "epoch": 0.5336, + "grad_norm": 8.74450397491455, + "learning_rate": 1.8359989555609344e-05, + "loss": 0.4095, + "step": 1334 + }, + { + "epoch": 0.5344, + "grad_norm": 8.038501739501953, + "learning_rate": 1.8375280400421407e-05, + "loss": 0.7819, + "step": 1336 + }, + { + "epoch": 0.5352, + "grad_norm": 7.184566974639893, + "learning_rate": 1.8390505933114503e-05, + "loss": 0.3759, + "step": 1338 + }, + { + "epoch": 0.536, + "grad_norm": 7.550333499908447, + "learning_rate": 1.8405666034956842e-05, + "loss": 1.3611, + 
"step": 1340 + }, + { + "epoch": 0.5368, + "grad_norm": 9.062027931213379, + "learning_rate": 1.842076058772692e-05, + "loss": 0.6528, + "step": 1342 + }, + { + "epoch": 0.5376, + "grad_norm": 4.2818708419799805, + "learning_rate": 1.8435789473714384e-05, + "loss": 0.9378, + "step": 1344 + }, + { + "epoch": 0.5384, + "grad_norm": 7.2454938888549805, + "learning_rate": 1.8450752575720964e-05, + "loss": 0.2129, + "step": 1346 + }, + { + "epoch": 0.5392, + "grad_norm": 2.8972926139831543, + "learning_rate": 1.8465649777061384e-05, + "loss": 0.4785, + "step": 1348 + }, + { + "epoch": 0.54, + "grad_norm": 7.637453556060791, + "learning_rate": 1.8480480961564266e-05, + "loss": 0.7323, + "step": 1350 + }, + { + "epoch": 0.5408, + "grad_norm": 3.591602325439453, + "learning_rate": 1.8495246013573047e-05, + "loss": 0.4019, + "step": 1352 + }, + { + "epoch": 0.5416, + "grad_norm": 8.564355850219727, + "learning_rate": 1.850994481794691e-05, + "loss": 0.5674, + "step": 1354 + }, + { + "epoch": 0.5424, + "grad_norm": 11.854432106018066, + "learning_rate": 1.8524577260061628e-05, + "loss": 0.5302, + "step": 1356 + }, + { + "epoch": 0.5432, + "grad_norm": 4.832673072814941, + "learning_rate": 1.8539143225810453e-05, + "loss": 0.397, + "step": 1358 + }, + { + "epoch": 0.544, + "grad_norm": 6.154983043670654, + "learning_rate": 1.8553642601605066e-05, + "loss": 0.8572, + "step": 1360 + }, + { + "epoch": 0.5448, + "grad_norm": 4.938594341278076, + "learning_rate": 1.856807527437643e-05, + "loss": 0.6868, + "step": 1362 + }, + { + "epoch": 0.5456, + "grad_norm": 5.232631206512451, + "learning_rate": 1.8582441131575658e-05, + "loss": 0.5783, + "step": 1364 + }, + { + "epoch": 0.5464, + "grad_norm": 5.804172039031982, + "learning_rate": 1.859674006117491e-05, + "loss": 0.3358, + "step": 1366 + }, + { + "epoch": 0.5472, + "grad_norm": 2.756375789642334, + "learning_rate": 1.8610971951668268e-05, + "loss": 0.396, + "step": 1368 + }, + { + "epoch": 0.548, + "grad_norm": 1.8918120861053467, + "learning_rate": 1.862513669207257e-05, + "loss": 0.2675, + "step": 1370 + }, + { + "epoch": 0.5488, + "grad_norm": 4.279423236846924, + "learning_rate": 1.8639234171928348e-05, + "loss": 0.2105, + "step": 1372 + }, + { + "epoch": 0.5496, + "grad_norm": 14.454187393188477, + "learning_rate": 1.8653264281300612e-05, + "loss": 0.7139, + "step": 1374 + }, + { + "epoch": 0.5504, + "grad_norm": 8.212747573852539, + "learning_rate": 1.8667226910779767e-05, + "loss": 0.4805, + "step": 1376 + }, + { + "epoch": 0.5512, + "grad_norm": 1.839599847793579, + "learning_rate": 1.8681121951482393e-05, + "loss": 0.3243, + "step": 1378 + }, + { + "epoch": 0.552, + "grad_norm": 8.92973518371582, + "learning_rate": 1.869494929505219e-05, + "loss": 1.4886, + "step": 1380 + }, + { + "epoch": 0.5528, + "grad_norm": 9.111851692199707, + "learning_rate": 1.870870883366075e-05, + "loss": 1.0078, + "step": 1382 + }, + { + "epoch": 0.5536, + "grad_norm": 2.5603065490722656, + "learning_rate": 1.8722400460008434e-05, + "loss": 0.2356, + "step": 1384 + }, + { + "epoch": 0.5544, + "grad_norm": 2.080918312072754, + "learning_rate": 1.8736024067325195e-05, + "loss": 0.3798, + "step": 1386 + }, + { + "epoch": 0.5552, + "grad_norm": 2.699968099594116, + "learning_rate": 1.8749579549371373e-05, + "loss": 0.4191, + "step": 1388 + }, + { + "epoch": 0.556, + "grad_norm": 5.318063259124756, + "learning_rate": 1.876306680043863e-05, + "loss": 0.3578, + "step": 1390 + }, + { + "epoch": 0.5568, + "grad_norm": 3.0889317989349365, + "learning_rate": 
1.8776485715350665e-05, + "loss": 0.3083, + "step": 1392 + }, + { + "epoch": 0.5576, + "grad_norm": 9.75284194946289, + "learning_rate": 1.878983618946409e-05, + "loss": 0.5724, + "step": 1394 + }, + { + "epoch": 0.5584, + "grad_norm": 2.678609848022461, + "learning_rate": 1.8803118118669203e-05, + "loss": 0.287, + "step": 1396 + }, + { + "epoch": 0.5592, + "grad_norm": 1.7660149335861206, + "learning_rate": 1.881633139939087e-05, + "loss": 0.2172, + "step": 1398 + }, + { + "epoch": 0.56, + "grad_norm": 2.460742950439453, + "learning_rate": 1.882947592858927e-05, + "loss": 0.3789, + "step": 1400 + }, + { + "epoch": 0.5608, + "grad_norm": 2.597334146499634, + "learning_rate": 1.884255160376072e-05, + "loss": 0.5009, + "step": 1402 + }, + { + "epoch": 0.5616, + "grad_norm": 2.7347497940063477, + "learning_rate": 1.885555832293849e-05, + "loss": 0.3827, + "step": 1404 + }, + { + "epoch": 0.5624, + "grad_norm": 11.216575622558594, + "learning_rate": 1.886849598469356e-05, + "loss": 1.3072, + "step": 1406 + }, + { + "epoch": 0.5632, + "grad_norm": 1.3659820556640625, + "learning_rate": 1.888136448813544e-05, + "loss": 0.2031, + "step": 1408 + }, + { + "epoch": 0.564, + "grad_norm": 6.435552597045898, + "learning_rate": 1.8894163732912972e-05, + "loss": 0.3993, + "step": 1410 + }, + { + "epoch": 0.5648, + "grad_norm": 6.237542629241943, + "learning_rate": 1.890689361921506e-05, + "loss": 0.3356, + "step": 1412 + }, + { + "epoch": 0.5656, + "grad_norm": 7.920870780944824, + "learning_rate": 1.891955404777151e-05, + "loss": 0.4875, + "step": 1414 + }, + { + "epoch": 0.5664, + "grad_norm": 8.32780933380127, + "learning_rate": 1.893214491985374e-05, + "loss": 0.4395, + "step": 1416 + }, + { + "epoch": 0.5672, + "grad_norm": 5.75148868560791, + "learning_rate": 1.89446661372756e-05, + "loss": 0.384, + "step": 1418 + }, + { + "epoch": 0.568, + "grad_norm": 6.593601226806641, + "learning_rate": 1.895711760239413e-05, + "loss": 0.4153, + "step": 1420 + }, + { + "epoch": 0.5688, + "grad_norm": 5.3204474449157715, + "learning_rate": 1.89694992181103e-05, + "loss": 0.5154, + "step": 1422 + }, + { + "epoch": 0.5696, + "grad_norm": 7.025198459625244, + "learning_rate": 1.8981810887869784e-05, + "loss": 0.3926, + "step": 1424 + }, + { + "epoch": 0.5704, + "grad_norm": 3.4113192558288574, + "learning_rate": 1.8994052515663708e-05, + "loss": 0.4192, + "step": 1426 + }, + { + "epoch": 0.5712, + "grad_norm": 2.7692418098449707, + "learning_rate": 1.90062240060294e-05, + "loss": 0.4204, + "step": 1428 + }, + { + "epoch": 0.572, + "grad_norm": 7.360057830810547, + "learning_rate": 1.9018325264051136e-05, + "loss": 0.3961, + "step": 1430 + }, + { + "epoch": 0.5728, + "grad_norm": 13.831832885742188, + "learning_rate": 1.9030356195360868e-05, + "loss": 0.7024, + "step": 1432 + }, + { + "epoch": 0.5736, + "grad_norm": 3.6427500247955322, + "learning_rate": 1.904231670613899e-05, + "loss": 0.2966, + "step": 1434 + }, + { + "epoch": 0.5744, + "grad_norm": 6.612185478210449, + "learning_rate": 1.905420670311502e-05, + "loss": 0.6514, + "step": 1436 + }, + { + "epoch": 0.5752, + "grad_norm": 7.35181188583374, + "learning_rate": 1.906602609356838e-05, + "loss": 0.6473, + "step": 1438 + }, + { + "epoch": 0.576, + "grad_norm": 7.588043689727783, + "learning_rate": 1.9077774785329078e-05, + "loss": 0.5851, + "step": 1440 + }, + { + "epoch": 0.5768, + "grad_norm": 5.37501859664917, + "learning_rate": 1.9089452686778487e-05, + "loss": 0.4444, + "step": 1442 + }, + { + "epoch": 0.5776, + "grad_norm": 2.9378836154937744, + 
"learning_rate": 1.9101059706849957e-05, + "loss": 0.5089, + "step": 1444 + }, + { + "epoch": 0.5784, + "grad_norm": 7.634711742401123, + "learning_rate": 1.911259575502962e-05, + "loss": 0.5481, + "step": 1446 + }, + { + "epoch": 0.5792, + "grad_norm": 3.4533517360687256, + "learning_rate": 1.912406074135706e-05, + "loss": 0.3859, + "step": 1448 + }, + { + "epoch": 0.58, + "grad_norm": 7.150989055633545, + "learning_rate": 1.9135454576426006e-05, + "loss": 0.4401, + "step": 1450 + }, + { + "epoch": 0.5808, + "grad_norm": 15.217089653015137, + "learning_rate": 1.9146777171385053e-05, + "loss": 0.7959, + "step": 1452 + }, + { + "epoch": 0.5816, + "grad_norm": 7.8969197273254395, + "learning_rate": 1.9158028437938316e-05, + "loss": 0.3802, + "step": 1454 + }, + { + "epoch": 0.5824, + "grad_norm": 5.025742530822754, + "learning_rate": 1.9169208288346168e-05, + "loss": 0.3686, + "step": 1456 + }, + { + "epoch": 0.5832, + "grad_norm": 12.718505859375, + "learning_rate": 1.9180316635425876e-05, + "loss": 0.3675, + "step": 1458 + }, + { + "epoch": 0.584, + "grad_norm": 5.548830032348633, + "learning_rate": 1.9191353392552346e-05, + "loss": 0.8477, + "step": 1460 + }, + { + "epoch": 0.5848, + "grad_norm": 5.419297695159912, + "learning_rate": 1.9202318473658703e-05, + "loss": 0.521, + "step": 1462 + }, + { + "epoch": 0.5856, + "grad_norm": 2.6505277156829834, + "learning_rate": 1.9213211793237052e-05, + "loss": 0.4772, + "step": 1464 + }, + { + "epoch": 0.5864, + "grad_norm": 5.732573509216309, + "learning_rate": 1.92240332663391e-05, + "loss": 0.619, + "step": 1466 + }, + { + "epoch": 0.5872, + "grad_norm": 7.656153678894043, + "learning_rate": 1.923478280857682e-05, + "loss": 0.6467, + "step": 1468 + }, + { + "epoch": 0.588, + "grad_norm": 12.494869232177734, + "learning_rate": 1.924546033612313e-05, + "loss": 1.3374, + "step": 1470 + }, + { + "epoch": 0.5888, + "grad_norm": 2.8723483085632324, + "learning_rate": 1.9256065765712524e-05, + "loss": 0.4653, + "step": 1472 + }, + { + "epoch": 0.5896, + "grad_norm": 2.027968168258667, + "learning_rate": 1.9266599014641724e-05, + "loss": 0.4778, + "step": 1474 + }, + { + "epoch": 0.5904, + "grad_norm": 3.951679229736328, + "learning_rate": 1.927706000077034e-05, + "loss": 0.2609, + "step": 1476 + }, + { + "epoch": 0.5912, + "grad_norm": 9.217889785766602, + "learning_rate": 1.9287448642521507e-05, + "loss": 0.893, + "step": 1478 + }, + { + "epoch": 0.592, + "grad_norm": 11.40930461883545, + "learning_rate": 1.9297764858882516e-05, + "loss": 0.7237, + "step": 1480 + }, + { + "epoch": 0.5928, + "grad_norm": 2.650052070617676, + "learning_rate": 1.9308008569405424e-05, + "loss": 0.4074, + "step": 1482 + }, + { + "epoch": 0.5936, + "grad_norm": 4.919514179229736, + "learning_rate": 1.9318179694207722e-05, + "loss": 1.0755, + "step": 1484 + }, + { + "epoch": 0.5944, + "grad_norm": 5.501877784729004, + "learning_rate": 1.9328278153972943e-05, + "loss": 0.36, + "step": 1486 + }, + { + "epoch": 0.5952, + "grad_norm": 3.0425186157226562, + "learning_rate": 1.9338303869951266e-05, + "loss": 0.3789, + "step": 1488 + }, + { + "epoch": 0.596, + "grad_norm": 0.8871821165084839, + "learning_rate": 1.934825676396015e-05, + "loss": 0.131, + "step": 1490 + }, + { + "epoch": 0.5968, + "grad_norm": 6.4956464767456055, + "learning_rate": 1.935813675838491e-05, + "loss": 0.5635, + "step": 1492 + }, + { + "epoch": 0.5976, + "grad_norm": 8.6665620803833, + "learning_rate": 1.9367943776179375e-05, + "loss": 0.5478, + "step": 1494 + }, + { + "epoch": 0.5984, + "grad_norm": 
13.301843643188477, + "learning_rate": 1.9377677740866457e-05, + "loss": 1.3023, + "step": 1496 + }, + { + "epoch": 0.5992, + "grad_norm": 8.54133129119873, + "learning_rate": 1.9387338576538743e-05, + "loss": 0.8299, + "step": 1498 + }, + { + "epoch": 0.6, + "grad_norm": 9.637697219848633, + "learning_rate": 1.9396926207859085e-05, + "loss": 0.592, + "step": 1500 + }, + { + "epoch": 0.6008, + "grad_norm": 6.597886562347412, + "learning_rate": 1.9406440560061214e-05, + "loss": 0.3459, + "step": 1502 + }, + { + "epoch": 0.6016, + "grad_norm": 9.389493942260742, + "learning_rate": 1.9415881558950302e-05, + "loss": 0.8848, + "step": 1504 + }, + { + "epoch": 0.6024, + "grad_norm": 4.483582973480225, + "learning_rate": 1.942524913090354e-05, + "loss": 0.2175, + "step": 1506 + }, + { + "epoch": 0.6032, + "grad_norm": 2.89554762840271, + "learning_rate": 1.9434543202870723e-05, + "loss": 2.5025, + "step": 1508 + }, + { + "epoch": 0.604, + "grad_norm": 3.055518388748169, + "learning_rate": 1.9443763702374815e-05, + "loss": 0.5473, + "step": 1510 + }, + { + "epoch": 0.6048, + "grad_norm": 1.5298120975494385, + "learning_rate": 1.9452910557512494e-05, + "loss": 0.3529, + "step": 1512 + }, + { + "epoch": 0.6056, + "grad_norm": 2.2401487827301025, + "learning_rate": 1.9461983696954756e-05, + "loss": 0.3263, + "step": 1514 + }, + { + "epoch": 0.6064, + "grad_norm": 6.360352993011475, + "learning_rate": 1.947098304994744e-05, + "loss": 0.5558, + "step": 1516 + }, + { + "epoch": 0.6072, + "grad_norm": 2.6135237216949463, + "learning_rate": 1.9479908546311787e-05, + "loss": 0.4786, + "step": 1518 + }, + { + "epoch": 0.608, + "grad_norm": 3.436858654022217, + "learning_rate": 1.9488760116444966e-05, + "loss": 0.4016, + "step": 1520 + }, + { + "epoch": 0.6088, + "grad_norm": 5.723163604736328, + "learning_rate": 1.949753769132067e-05, + "loss": 0.5532, + "step": 1522 + }, + { + "epoch": 0.6096, + "grad_norm": 5.951367378234863, + "learning_rate": 1.95062412024896e-05, + "loss": 0.6564, + "step": 1524 + }, + { + "epoch": 0.6104, + "grad_norm": 6.127574920654297, + "learning_rate": 1.951487058208003e-05, + "loss": 0.5847, + "step": 1526 + }, + { + "epoch": 0.6112, + "grad_norm": 0.4084920287132263, + "learning_rate": 1.952342576279833e-05, + "loss": 0.1559, + "step": 1528 + }, + { + "epoch": 0.612, + "grad_norm": 4.772994518280029, + "learning_rate": 1.953190667792947e-05, + "loss": 0.3836, + "step": 1530 + }, + { + "epoch": 0.6128, + "grad_norm": 3.275306463241577, + "learning_rate": 1.9540313261337578e-05, + "loss": 0.4954, + "step": 1532 + }, + { + "epoch": 0.6136, + "grad_norm": 17.405899047851562, + "learning_rate": 1.954864544746643e-05, + "loss": 0.9868, + "step": 1534 + }, + { + "epoch": 0.6144, + "grad_norm": 3.4976556301116943, + "learning_rate": 1.955690317133996e-05, + "loss": 0.5427, + "step": 1536 + }, + { + "epoch": 0.6152, + "grad_norm": 4.741286277770996, + "learning_rate": 1.956508636856278e-05, + "loss": 0.5814, + "step": 1538 + }, + { + "epoch": 0.616, + "grad_norm": 9.094281196594238, + "learning_rate": 1.9573194975320672e-05, + "loss": 0.7027, + "step": 1540 + }, + { + "epoch": 0.6168, + "grad_norm": 10.391242027282715, + "learning_rate": 1.95812289283811e-05, + "loss": 0.6991, + "step": 1542 + }, + { + "epoch": 0.6176, + "grad_norm": 4.268300533294678, + "learning_rate": 1.958918816509367e-05, + "loss": 0.5425, + "step": 1544 + }, + { + "epoch": 0.6184, + "grad_norm": 2.675271511077881, + "learning_rate": 1.9597072623390668e-05, + "loss": 0.427, + "step": 1546 + }, + { + "epoch": 
0.6192, + "grad_norm": 4.287508010864258, + "learning_rate": 1.9604882241787496e-05, + "loss": 0.4181, + "step": 1548 + }, + { + "epoch": 0.62, + "grad_norm": 5.404676914215088, + "learning_rate": 1.9612616959383187e-05, + "loss": 0.4235, + "step": 1550 + }, + { + "epoch": 0.6208, + "grad_norm": 13.730271339416504, + "learning_rate": 1.9620276715860856e-05, + "loss": 0.8335, + "step": 1552 + }, + { + "epoch": 0.6216, + "grad_norm": 4.315786361694336, + "learning_rate": 1.9627861451488187e-05, + "loss": 0.5621, + "step": 1554 + }, + { + "epoch": 0.6224, + "grad_norm": 7.495677947998047, + "learning_rate": 1.963537110711789e-05, + "loss": 0.4497, + "step": 1556 + }, + { + "epoch": 0.6232, + "grad_norm": 3.7830357551574707, + "learning_rate": 1.964280562418815e-05, + "loss": 0.2931, + "step": 1558 + }, + { + "epoch": 0.624, + "grad_norm": 11.285057067871094, + "learning_rate": 1.9650164944723116e-05, + "loss": 0.7102, + "step": 1560 + }, + { + "epoch": 0.6248, + "grad_norm": 4.690367221832275, + "learning_rate": 1.9657449011333328e-05, + "loss": 0.3579, + "step": 1562 + }, + { + "epoch": 0.6256, + "grad_norm": 5.907130718231201, + "learning_rate": 1.9664657767216176e-05, + "loss": 0.5712, + "step": 1564 + }, + { + "epoch": 0.6264, + "grad_norm": 4.572456359863281, + "learning_rate": 1.967179115615633e-05, + "loss": 0.4964, + "step": 1566 + }, + { + "epoch": 0.6272, + "grad_norm": 6.378812313079834, + "learning_rate": 1.967884912252619e-05, + "loss": 0.4332, + "step": 1568 + }, + { + "epoch": 0.628, + "grad_norm": 4.807539463043213, + "learning_rate": 1.9685831611286312e-05, + "loss": 0.3473, + "step": 1570 + }, + { + "epoch": 0.6288, + "grad_norm": 7.2378339767456055, + "learning_rate": 1.969273856798585e-05, + "loss": 0.4192, + "step": 1572 + }, + { + "epoch": 0.6296, + "grad_norm": 8.278740882873535, + "learning_rate": 1.9699569938762972e-05, + "loss": 0.4816, + "step": 1574 + }, + { + "epoch": 0.6304, + "grad_norm": 8.5382719039917, + "learning_rate": 1.9706325670345276e-05, + "loss": 0.6862, + "step": 1576 + }, + { + "epoch": 0.6312, + "grad_norm": 3.933340549468994, + "learning_rate": 1.9713005710050203e-05, + "loss": 0.4656, + "step": 1578 + }, + { + "epoch": 0.632, + "grad_norm": 7.442902565002441, + "learning_rate": 1.9719610005785466e-05, + "loss": 0.4644, + "step": 1580 + }, + { + "epoch": 0.6328, + "grad_norm": 10.707149505615234, + "learning_rate": 1.9726138506049434e-05, + "loss": 0.6171, + "step": 1582 + }, + { + "epoch": 0.6336, + "grad_norm": 2.4153261184692383, + "learning_rate": 1.9732591159931564e-05, + "loss": 0.4276, + "step": 1584 + }, + { + "epoch": 0.6344, + "grad_norm": 2.093482732772827, + "learning_rate": 1.9738967917112752e-05, + "loss": 0.3721, + "step": 1586 + }, + { + "epoch": 0.6352, + "grad_norm": 5.4622344970703125, + "learning_rate": 1.974526872786577e-05, + "loss": 0.4802, + "step": 1588 + }, + { + "epoch": 0.636, + "grad_norm": 5.805969715118408, + "learning_rate": 1.9751493543055634e-05, + "loss": 0.9636, + "step": 1590 + }, + { + "epoch": 0.6368, + "grad_norm": 9.242039680480957, + "learning_rate": 1.9757642314139977e-05, + "loss": 0.8385, + "step": 1592 + }, + { + "epoch": 0.6376, + "grad_norm": 4.233711242675781, + "learning_rate": 1.976371499316945e-05, + "loss": 0.2892, + "step": 1594 + }, + { + "epoch": 0.6384, + "grad_norm": 8.57029914855957, + "learning_rate": 1.9769711532788083e-05, + "loss": 0.5883, + "step": 1596 + }, + { + "epoch": 0.6392, + "grad_norm": 3.897994041442871, + "learning_rate": 1.9775631886233655e-05, + "loss": 0.4078, + "step": 
1598 + }, + { + "epoch": 0.64, + "grad_norm": 3.17345929145813, + "learning_rate": 1.9781476007338054e-05, + "loss": 0.3577, + "step": 1600 + }, + { + "epoch": 0.6408, + "grad_norm": 11.469186782836914, + "learning_rate": 1.978724385052766e-05, + "loss": 0.5497, + "step": 1602 + }, + { + "epoch": 0.6416, + "grad_norm": 1.5659518241882324, + "learning_rate": 1.9792935370823673e-05, + "loss": 0.2753, + "step": 1604 + }, + { + "epoch": 0.6424, + "grad_norm": 2.373509168624878, + "learning_rate": 1.979855052384247e-05, + "loss": 0.3695, + "step": 1606 + }, + { + "epoch": 0.6432, + "grad_norm": 2.043501138687134, + "learning_rate": 1.9804089265795956e-05, + "loss": 0.3488, + "step": 1608 + }, + { + "epoch": 0.644, + "grad_norm": 4.598385334014893, + "learning_rate": 1.9809551553491918e-05, + "loss": 0.6336, + "step": 1610 + }, + { + "epoch": 0.6448, + "grad_norm": 2.1152210235595703, + "learning_rate": 1.981493734433433e-05, + "loss": 0.302, + "step": 1612 + }, + { + "epoch": 0.6456, + "grad_norm": 2.2232983112335205, + "learning_rate": 1.982024659632372e-05, + "loss": 0.3294, + "step": 1614 + }, + { + "epoch": 0.6464, + "grad_norm": 5.589328289031982, + "learning_rate": 1.9825479268057472e-05, + "loss": 0.2848, + "step": 1616 + }, + { + "epoch": 0.6472, + "grad_norm": 7.114500522613525, + "learning_rate": 1.9830635318730155e-05, + "loss": 0.7318, + "step": 1618 + }, + { + "epoch": 0.648, + "grad_norm": 5.905538082122803, + "learning_rate": 1.9835714708133858e-05, + "loss": 0.3707, + "step": 1620 + }, + { + "epoch": 0.6488, + "grad_norm": 2.066833734512329, + "learning_rate": 1.9840717396658483e-05, + "loss": 0.5473, + "step": 1622 + }, + { + "epoch": 0.6496, + "grad_norm": 3.5688998699188232, + "learning_rate": 1.9845643345292055e-05, + "loss": 0.3819, + "step": 1624 + }, + { + "epoch": 0.6504, + "grad_norm": 1.7835557460784912, + "learning_rate": 1.9850492515621038e-05, + "loss": 0.309, + "step": 1626 + }, + { + "epoch": 0.6512, + "grad_norm": 2.2958924770355225, + "learning_rate": 1.985526486983063e-05, + "loss": 0.2285, + "step": 1628 + }, + { + "epoch": 0.652, + "grad_norm": 3.196260452270508, + "learning_rate": 1.985996037070505e-05, + "loss": 0.5863, + "step": 1630 + }, + { + "epoch": 0.6528, + "grad_norm": 8.522262573242188, + "learning_rate": 1.9864578981627844e-05, + "loss": 0.8679, + "step": 1632 + }, + { + "epoch": 0.6536, + "grad_norm": 3.0343809127807617, + "learning_rate": 1.9869120666582153e-05, + "loss": 0.5473, + "step": 1634 + }, + { + "epoch": 0.6544, + "grad_norm": 2.9875917434692383, + "learning_rate": 1.9873585390151003e-05, + "loss": 0.402, + "step": 1636 + }, + { + "epoch": 0.6552, + "grad_norm": 10.342791557312012, + "learning_rate": 1.987797311751759e-05, + "loss": 0.4833, + "step": 1638 + }, + { + "epoch": 0.656, + "grad_norm": 6.520310401916504, + "learning_rate": 1.9882283814465528e-05, + "loss": 0.3338, + "step": 1640 + }, + { + "epoch": 0.6568, + "grad_norm": 2.931133508682251, + "learning_rate": 1.988651744737914e-05, + "loss": 0.2669, + "step": 1642 + }, + { + "epoch": 0.6576, + "grad_norm": 11.377016067504883, + "learning_rate": 1.9890673983243704e-05, + "loss": 0.6862, + "step": 1644 + }, + { + "epoch": 0.6584, + "grad_norm": 6.504305362701416, + "learning_rate": 1.9894753389645723e-05, + "loss": 0.329, + "step": 1646 + }, + { + "epoch": 0.6592, + "grad_norm": 2.494025230407715, + "learning_rate": 1.9898755634773155e-05, + "loss": 0.3779, + "step": 1648 + }, + { + "epoch": 0.66, + "grad_norm": 9.32198429107666, + "learning_rate": 1.9902680687415704e-05, + 
"loss": 0.5162, + "step": 1650 + }, + { + "epoch": 0.6608, + "grad_norm": 4.862305641174316, + "learning_rate": 1.9906528516965014e-05, + "loss": 0.6947, + "step": 1652 + }, + { + "epoch": 0.6616, + "grad_norm": 12.738851547241211, + "learning_rate": 1.9910299093414926e-05, + "loss": 0.4528, + "step": 1654 + }, + { + "epoch": 0.6624, + "grad_norm": 2.390115737915039, + "learning_rate": 1.9913992387361744e-05, + "loss": 0.4201, + "step": 1656 + }, + { + "epoch": 0.6632, + "grad_norm": 7.348924160003662, + "learning_rate": 1.9917608370004414e-05, + "loss": 0.4181, + "step": 1658 + }, + { + "epoch": 0.664, + "grad_norm": 3.2150723934173584, + "learning_rate": 1.9921147013144782e-05, + "loss": 0.3665, + "step": 1660 + }, + { + "epoch": 0.6648, + "grad_norm": 4.1383819580078125, + "learning_rate": 1.9924608289187786e-05, + "loss": 0.4743, + "step": 1662 + }, + { + "epoch": 0.6656, + "grad_norm": 6.19020938873291, + "learning_rate": 1.9927992171141707e-05, + "loss": 0.6406, + "step": 1664 + }, + { + "epoch": 0.6664, + "grad_norm": 2.8534698486328125, + "learning_rate": 1.9931298632618355e-05, + "loss": 0.2139, + "step": 1666 + }, + { + "epoch": 0.6672, + "grad_norm": 6.954126834869385, + "learning_rate": 1.9934527647833276e-05, + "loss": 0.4103, + "step": 1668 + }, + { + "epoch": 0.668, + "grad_norm": 1.7877683639526367, + "learning_rate": 1.9937679191605964e-05, + "loss": 0.4074, + "step": 1670 + }, + { + "epoch": 0.6688, + "grad_norm": 3.242607593536377, + "learning_rate": 1.9940753239360047e-05, + "loss": 0.3618, + "step": 1672 + }, + { + "epoch": 0.6696, + "grad_norm": 3.0980658531188965, + "learning_rate": 1.994374976712348e-05, + "loss": 0.4763, + "step": 1674 + }, + { + "epoch": 0.6704, + "grad_norm": 4.143883228302002, + "learning_rate": 1.994666875152874e-05, + "loss": 0.5616, + "step": 1676 + }, + { + "epoch": 0.6712, + "grad_norm": 2.5318260192871094, + "learning_rate": 1.9949510169813003e-05, + "loss": 0.2323, + "step": 1678 + }, + { + "epoch": 0.672, + "grad_norm": 12.222033500671387, + "learning_rate": 1.9952273999818312e-05, + "loss": 0.4916, + "step": 1680 + }, + { + "epoch": 0.6728, + "grad_norm": 3.9567325115203857, + "learning_rate": 1.995496021999177e-05, + "loss": 0.4506, + "step": 1682 + }, + { + "epoch": 0.6736, + "grad_norm": 2.8990304470062256, + "learning_rate": 1.9957568809385693e-05, + "loss": 0.4624, + "step": 1684 + }, + { + "epoch": 0.6744, + "grad_norm": 3.471292018890381, + "learning_rate": 1.9960099747657774e-05, + "loss": 0.4444, + "step": 1686 + }, + { + "epoch": 0.6752, + "grad_norm": 3.2368650436401367, + "learning_rate": 1.996255301507125e-05, + "loss": 0.3354, + "step": 1688 + }, + { + "epoch": 0.676, + "grad_norm": 4.808667182922363, + "learning_rate": 1.9964928592495046e-05, + "loss": 0.8105, + "step": 1690 + }, + { + "epoch": 0.6768, + "grad_norm": 2.451111316680908, + "learning_rate": 1.9967226461403934e-05, + "loss": 0.337, + "step": 1692 + }, + { + "epoch": 0.6776, + "grad_norm": 11.671585083007812, + "learning_rate": 1.996944660387867e-05, + "loss": 0.4399, + "step": 1694 + }, + { + "epoch": 0.6784, + "grad_norm": 7.658277988433838, + "learning_rate": 1.997158900260614e-05, + "loss": 0.4076, + "step": 1696 + }, + { + "epoch": 0.6792, + "grad_norm": 5.0221686363220215, + "learning_rate": 1.9973653640879486e-05, + "loss": 0.4134, + "step": 1698 + }, + { + "epoch": 0.68, + "grad_norm": 7.082271575927734, + "learning_rate": 1.9975640502598246e-05, + "loss": 0.5284, + "step": 1700 + }, + { + "epoch": 0.6808, + "grad_norm": 3.120457649230957, + 
"learning_rate": 1.997754957226847e-05, + "loss": 0.5992, + "step": 1702 + }, + { + "epoch": 0.6816, + "grad_norm": 1.900876760482788, + "learning_rate": 1.9979380835002846e-05, + "loss": 0.3063, + "step": 1704 + }, + { + "epoch": 0.6824, + "grad_norm": 9.405780792236328, + "learning_rate": 1.9981134276520828e-05, + "loss": 0.3785, + "step": 1706 + }, + { + "epoch": 0.6832, + "grad_norm": 2.7329249382019043, + "learning_rate": 1.998280988314872e-05, + "loss": 0.2797, + "step": 1708 + }, + { + "epoch": 0.684, + "grad_norm": 2.579680919647217, + "learning_rate": 1.998440764181981e-05, + "loss": 0.471, + "step": 1710 + }, + { + "epoch": 0.6848, + "grad_norm": 5.701201915740967, + "learning_rate": 1.9985927540074453e-05, + "loss": 0.3976, + "step": 1712 + }, + { + "epoch": 0.6856, + "grad_norm": 7.745370864868164, + "learning_rate": 1.998736956606018e-05, + "loss": 0.3947, + "step": 1714 + }, + { + "epoch": 0.6864, + "grad_norm": 8.640480995178223, + "learning_rate": 1.9988733708531772e-05, + "loss": 1.0942, + "step": 1716 + }, + { + "epoch": 0.6872, + "grad_norm": 8.300025939941406, + "learning_rate": 1.9990019956851384e-05, + "loss": 0.7977, + "step": 1718 + }, + { + "epoch": 0.688, + "grad_norm": 5.9511895179748535, + "learning_rate": 1.9991228300988586e-05, + "loss": 0.4706, + "step": 1720 + }, + { + "epoch": 0.6888, + "grad_norm": 5.14940881729126, + "learning_rate": 1.999235873152047e-05, + "loss": 0.2438, + "step": 1722 + }, + { + "epoch": 0.6896, + "grad_norm": 6.6042985916137695, + "learning_rate": 1.9993411239631713e-05, + "loss": 0.3952, + "step": 1724 + }, + { + "epoch": 0.6904, + "grad_norm": 6.875572681427002, + "learning_rate": 1.9994385817114644e-05, + "loss": 0.4717, + "step": 1726 + }, + { + "epoch": 0.6912, + "grad_norm": 2.505627155303955, + "learning_rate": 1.9995282456369313e-05, + "loss": 0.3459, + "step": 1728 + }, + { + "epoch": 0.692, + "grad_norm": 6.113884925842285, + "learning_rate": 1.9996101150403543e-05, + "loss": 0.5111, + "step": 1730 + }, + { + "epoch": 0.6928, + "grad_norm": 2.1594738960266113, + "learning_rate": 1.9996841892833e-05, + "loss": 0.2743, + "step": 1732 + }, + { + "epoch": 0.6936, + "grad_norm": 20.72479820251465, + "learning_rate": 1.9997504677881224e-05, + "loss": 0.9828, + "step": 1734 + }, + { + "epoch": 0.6944, + "grad_norm": 7.1836700439453125, + "learning_rate": 1.999808950037968e-05, + "loss": 0.493, + "step": 1736 + }, + { + "epoch": 0.6952, + "grad_norm": 5.67575740814209, + "learning_rate": 1.9998596355767805e-05, + "loss": 0.3416, + "step": 1738 + }, + { + "epoch": 0.696, + "grad_norm": 9.29202651977539, + "learning_rate": 1.9999025240093045e-05, + "loss": 0.5197, + "step": 1740 + }, + { + "epoch": 0.6968, + "grad_norm": 5.713569164276123, + "learning_rate": 1.9999376150010868e-05, + "loss": 0.4243, + "step": 1742 + }, + { + "epoch": 0.6976, + "grad_norm": 3.9301981925964355, + "learning_rate": 1.9999649082784807e-05, + "loss": 0.3626, + "step": 1744 + }, + { + "epoch": 0.6984, + "grad_norm": 3.975416898727417, + "learning_rate": 1.9999844036286483e-05, + "loss": 0.8586, + "step": 1746 + }, + { + "epoch": 0.6992, + "grad_norm": 6.84246826171875, + "learning_rate": 1.9999961008995607e-05, + "loss": 0.7227, + "step": 1748 + }, + { + "epoch": 0.7, + "grad_norm": 20.004087448120117, + "learning_rate": 2e-05, + "loss": 0.9191, + "step": 1750 + }, + { + "epoch": 0.7008, + "grad_norm": 5.791635990142822, + "learning_rate": 1.9999961008995607e-05, + "loss": 0.4023, + "step": 1752 + }, + { + "epoch": 0.7016, + "grad_norm": 5.369904041290283, 
+ "learning_rate": 1.9999844036286483e-05, + "loss": 0.3795, + "step": 1754 + }, + { + "epoch": 0.7024, + "grad_norm": 3.682668924331665, + "learning_rate": 1.9999649082784807e-05, + "loss": 0.5381, + "step": 1756 + }, + { + "epoch": 0.7032, + "grad_norm": 2.7093493938446045, + "learning_rate": 1.9999376150010868e-05, + "loss": 0.6459, + "step": 1758 + }, + { + "epoch": 0.704, + "grad_norm": 2.522735357284546, + "learning_rate": 1.9999025240093045e-05, + "loss": 0.5787, + "step": 1760 + }, + { + "epoch": 0.7048, + "grad_norm": 8.20150375366211, + "learning_rate": 1.9998596355767805e-05, + "loss": 0.5729, + "step": 1762 + }, + { + "epoch": 0.7056, + "grad_norm": 8.837404251098633, + "learning_rate": 1.999808950037968e-05, + "loss": 0.5134, + "step": 1764 + }, + { + "epoch": 0.7064, + "grad_norm": 5.532891273498535, + "learning_rate": 1.9997504677881224e-05, + "loss": 0.7412, + "step": 1766 + }, + { + "epoch": 0.7072, + "grad_norm": 5.736209869384766, + "learning_rate": 1.9996841892833e-05, + "loss": 0.5258, + "step": 1768 + }, + { + "epoch": 0.708, + "grad_norm": 4.132336616516113, + "learning_rate": 1.9996101150403547e-05, + "loss": 0.3544, + "step": 1770 + }, + { + "epoch": 0.7088, + "grad_norm": 4.415733337402344, + "learning_rate": 1.9995282456369313e-05, + "loss": 0.4621, + "step": 1772 + }, + { + "epoch": 0.7096, + "grad_norm": 4.9113264083862305, + "learning_rate": 1.9994385817114644e-05, + "loss": 0.5411, + "step": 1774 + }, + { + "epoch": 0.7104, + "grad_norm": 3.3670785427093506, + "learning_rate": 1.9993411239631713e-05, + "loss": 0.3433, + "step": 1776 + }, + { + "epoch": 0.7112, + "grad_norm": 3.183927059173584, + "learning_rate": 1.999235873152047e-05, + "loss": 0.4211, + "step": 1778 + }, + { + "epoch": 0.712, + "grad_norm": 8.622447967529297, + "learning_rate": 1.9991228300988586e-05, + "loss": 1.7236, + "step": 1780 + }, + { + "epoch": 0.7128, + "grad_norm": 3.417908191680908, + "learning_rate": 1.9990019956851384e-05, + "loss": 0.2192, + "step": 1782 + }, + { + "epoch": 0.7136, + "grad_norm": 15.327376365661621, + "learning_rate": 1.9988733708531772e-05, + "loss": 0.949, + "step": 1784 + }, + { + "epoch": 0.7144, + "grad_norm": 6.369883060455322, + "learning_rate": 1.998736956606018e-05, + "loss": 0.5447, + "step": 1786 + }, + { + "epoch": 0.7152, + "grad_norm": 5.282643795013428, + "learning_rate": 1.9985927540074453e-05, + "loss": 0.2633, + "step": 1788 + }, + { + "epoch": 0.716, + "grad_norm": 1.8992022275924683, + "learning_rate": 1.9984407641819812e-05, + "loss": 0.3532, + "step": 1790 + }, + { + "epoch": 0.7168, + "grad_norm": 9.116549491882324, + "learning_rate": 1.998280988314872e-05, + "loss": 0.5309, + "step": 1792 + }, + { + "epoch": 0.7176, + "grad_norm": 4.018207550048828, + "learning_rate": 1.9981134276520828e-05, + "loss": 0.6177, + "step": 1794 + }, + { + "epoch": 0.7184, + "grad_norm": 3.2702534198760986, + "learning_rate": 1.9979380835002846e-05, + "loss": 0.5697, + "step": 1796 + }, + { + "epoch": 0.7192, + "grad_norm": 13.79327392578125, + "learning_rate": 1.9977549572268467e-05, + "loss": 0.86, + "step": 1798 + }, + { + "epoch": 0.72, + "grad_norm": 2.421355724334717, + "learning_rate": 1.9975640502598246e-05, + "loss": 0.3692, + "step": 1800 + }, + { + "epoch": 0.7208, + "grad_norm": 6.39658260345459, + "learning_rate": 1.9973653640879486e-05, + "loss": 0.5341, + "step": 1802 + }, + { + "epoch": 0.7216, + "grad_norm": 12.072526931762695, + "learning_rate": 1.997158900260614e-05, + "loss": 0.843, + "step": 1804 + }, + { + "epoch": 0.7224, + 
"grad_norm": 6.202824115753174, + "learning_rate": 1.9969446603878673e-05, + "loss": 0.4508, + "step": 1806 + }, + { + "epoch": 0.7232, + "grad_norm": 12.091536521911621, + "learning_rate": 1.9967226461403934e-05, + "loss": 0.6631, + "step": 1808 + }, + { + "epoch": 0.724, + "grad_norm": 3.4513208866119385, + "learning_rate": 1.9964928592495046e-05, + "loss": 0.3141, + "step": 1810 + }, + { + "epoch": 0.7248, + "grad_norm": 11.12137508392334, + "learning_rate": 1.996255301507125e-05, + "loss": 0.539, + "step": 1812 + }, + { + "epoch": 0.7256, + "grad_norm": 2.0206141471862793, + "learning_rate": 1.9960099747657774e-05, + "loss": 0.658, + "step": 1814 + }, + { + "epoch": 0.7264, + "grad_norm": 7.8277106285095215, + "learning_rate": 1.9957568809385693e-05, + "loss": 0.5622, + "step": 1816 + }, + { + "epoch": 0.7272, + "grad_norm": 4.81088924407959, + "learning_rate": 1.995496021999177e-05, + "loss": 0.3566, + "step": 1818 + }, + { + "epoch": 0.728, + "grad_norm": 4.36074161529541, + "learning_rate": 1.9952273999818312e-05, + "loss": 0.4191, + "step": 1820 + }, + { + "epoch": 0.7288, + "grad_norm": 4.0613226890563965, + "learning_rate": 1.9949510169813006e-05, + "loss": 0.4074, + "step": 1822 + }, + { + "epoch": 0.7296, + "grad_norm": 5.445612907409668, + "learning_rate": 1.9946668751528745e-05, + "loss": 0.3167, + "step": 1824 + }, + { + "epoch": 0.7304, + "grad_norm": 2.8114616870880127, + "learning_rate": 1.994374976712348e-05, + "loss": 0.6147, + "step": 1826 + }, + { + "epoch": 0.7312, + "grad_norm": 2.5986952781677246, + "learning_rate": 1.9940753239360047e-05, + "loss": 0.5259, + "step": 1828 + }, + { + "epoch": 0.732, + "grad_norm": 3.6003918647766113, + "learning_rate": 1.9937679191605964e-05, + "loss": 0.3197, + "step": 1830 + }, + { + "epoch": 0.7328, + "grad_norm": 3.8639371395111084, + "learning_rate": 1.993452764783328e-05, + "loss": 0.21, + "step": 1832 + }, + { + "epoch": 0.7336, + "grad_norm": 2.496325969696045, + "learning_rate": 1.9931298632618352e-05, + "loss": 0.3632, + "step": 1834 + }, + { + "epoch": 0.7344, + "grad_norm": 2.928487777709961, + "learning_rate": 1.9927992171141707e-05, + "loss": 0.3398, + "step": 1836 + }, + { + "epoch": 0.7352, + "grad_norm": 5.671520709991455, + "learning_rate": 1.9924608289187786e-05, + "loss": 0.5193, + "step": 1838 + }, + { + "epoch": 0.736, + "grad_norm": 3.602909803390503, + "learning_rate": 1.9921147013144782e-05, + "loss": 0.1562, + "step": 1840 + }, + { + "epoch": 0.7368, + "grad_norm": 2.749011993408203, + "learning_rate": 1.9917608370004417e-05, + "loss": 0.315, + "step": 1842 + }, + { + "epoch": 0.7376, + "grad_norm": 4.273372173309326, + "learning_rate": 1.9913992387361747e-05, + "loss": 0.3395, + "step": 1844 + }, + { + "epoch": 0.7384, + "grad_norm": 5.967968463897705, + "learning_rate": 1.9910299093414932e-05, + "loss": 0.4194, + "step": 1846 + }, + { + "epoch": 0.7392, + "grad_norm": 2.5091896057128906, + "learning_rate": 1.990652851696501e-05, + "loss": 0.5031, + "step": 1848 + }, + { + "epoch": 0.74, + "grad_norm": 7.949019432067871, + "learning_rate": 1.9902680687415704e-05, + "loss": 1.0475, + "step": 1850 + }, + { + "epoch": 0.7408, + "grad_norm": 2.438105821609497, + "learning_rate": 1.9898755634773155e-05, + "loss": 0.3475, + "step": 1852 + }, + { + "epoch": 0.7416, + "grad_norm": 8.848106384277344, + "learning_rate": 1.9894753389645723e-05, + "loss": 0.4821, + "step": 1854 + }, + { + "epoch": 0.7424, + "grad_norm": 5.018491744995117, + "learning_rate": 1.9890673983243704e-05, + "loss": 0.3251, + "step": 1856 + }, 
+ { + "epoch": 0.7432, + "grad_norm": 5.742210388183594, + "learning_rate": 1.9886517447379143e-05, + "loss": 0.2919, + "step": 1858 + }, + { + "epoch": 0.744, + "grad_norm": 10.081647872924805, + "learning_rate": 1.988228381446553e-05, + "loss": 0.5546, + "step": 1860 + }, + { + "epoch": 0.7448, + "grad_norm": 4.09962272644043, + "learning_rate": 1.987797311751759e-05, + "loss": 0.3861, + "step": 1862 + }, + { + "epoch": 0.7456, + "grad_norm": 3.1583993434906006, + "learning_rate": 1.9873585390151007e-05, + "loss": 0.3128, + "step": 1864 + }, + { + "epoch": 0.7464, + "grad_norm": 1.916212797164917, + "learning_rate": 1.9869120666582153e-05, + "loss": 0.3608, + "step": 1866 + }, + { + "epoch": 0.7472, + "grad_norm": 8.458840370178223, + "learning_rate": 1.9864578981627844e-05, + "loss": 0.7873, + "step": 1868 + }, + { + "epoch": 0.748, + "grad_norm": 6.865233898162842, + "learning_rate": 1.985996037070505e-05, + "loss": 0.3612, + "step": 1870 + }, + { + "epoch": 0.7488, + "grad_norm": 10.543816566467285, + "learning_rate": 1.985526486983063e-05, + "loss": 0.698, + "step": 1872 + }, + { + "epoch": 0.7496, + "grad_norm": 6.476944446563721, + "learning_rate": 1.9850492515621038e-05, + "loss": 0.5256, + "step": 1874 + }, + { + "epoch": 0.7504, + "grad_norm": 8.187792778015137, + "learning_rate": 1.9845643345292055e-05, + "loss": 0.4788, + "step": 1876 + }, + { + "epoch": 0.7512, + "grad_norm": 1.9630470275878906, + "learning_rate": 1.9840717396658483e-05, + "loss": 0.2404, + "step": 1878 + }, + { + "epoch": 0.752, + "grad_norm": 8.175183296203613, + "learning_rate": 1.983571470813386e-05, + "loss": 0.699, + "step": 1880 + }, + { + "epoch": 0.7528, + "grad_norm": 12.990069389343262, + "learning_rate": 1.983063531873016e-05, + "loss": 1.1128, + "step": 1882 + }, + { + "epoch": 0.7536, + "grad_norm": 5.050752639770508, + "learning_rate": 1.982547926805747e-05, + "loss": 0.7573, + "step": 1884 + }, + { + "epoch": 0.7544, + "grad_norm": 6.0260515213012695, + "learning_rate": 1.9820246596323724e-05, + "loss": 0.3665, + "step": 1886 + }, + { + "epoch": 0.7552, + "grad_norm": 14.117484092712402, + "learning_rate": 1.981493734433433e-05, + "loss": 0.8667, + "step": 1888 + }, + { + "epoch": 0.756, + "grad_norm": 11.667659759521484, + "learning_rate": 1.9809551553491918e-05, + "loss": 0.6158, + "step": 1890 + }, + { + "epoch": 0.7568, + "grad_norm": 5.7208380699157715, + "learning_rate": 1.9804089265795963e-05, + "loss": 0.5358, + "step": 1892 + }, + { + "epoch": 0.7576, + "grad_norm": 5.21250057220459, + "learning_rate": 1.979855052384247e-05, + "loss": 0.4207, + "step": 1894 + }, + { + "epoch": 0.7584, + "grad_norm": 3.6927437782287598, + "learning_rate": 1.979293537082368e-05, + "loss": 0.6051, + "step": 1896 + }, + { + "epoch": 0.7592, + "grad_norm": 4.236647605895996, + "learning_rate": 1.9787243850527663e-05, + "loss": 0.3979, + "step": 1898 + }, + { + "epoch": 0.76, + "grad_norm": 5.474388122558594, + "learning_rate": 1.978147600733806e-05, + "loss": 0.6603, + "step": 1900 + }, + { + "epoch": 0.7608, + "grad_norm": 3.9843273162841797, + "learning_rate": 1.977563188623365e-05, + "loss": 0.4227, + "step": 1902 + }, + { + "epoch": 0.7616, + "grad_norm": 2.3406167030334473, + "learning_rate": 1.9769711532788086e-05, + "loss": 0.2071, + "step": 1904 + }, + { + "epoch": 0.7624, + "grad_norm": 8.789372444152832, + "learning_rate": 1.9763714993169448e-05, + "loss": 0.5955, + "step": 1906 + }, + { + "epoch": 0.7632, + "grad_norm": 4.878142833709717, + "learning_rate": 1.9757642314139977e-05, + "loss": 
0.3273, + "step": 1908 + }, + { + "epoch": 0.764, + "grad_norm": 3.703111410140991, + "learning_rate": 1.9751493543055638e-05, + "loss": 0.2455, + "step": 1910 + }, + { + "epoch": 0.7648, + "grad_norm": 1.3227205276489258, + "learning_rate": 1.9745268727865774e-05, + "loss": 0.1464, + "step": 1912 + }, + { + "epoch": 0.7656, + "grad_norm": 2.471626043319702, + "learning_rate": 1.973896791711276e-05, + "loss": 0.4114, + "step": 1914 + }, + { + "epoch": 0.7664, + "grad_norm": 6.342803955078125, + "learning_rate": 1.9732591159931567e-05, + "loss": 0.3901, + "step": 1916 + }, + { + "epoch": 0.7672, + "grad_norm": 3.0534446239471436, + "learning_rate": 1.972613850604944e-05, + "loss": 0.6209, + "step": 1918 + }, + { + "epoch": 0.768, + "grad_norm": 6.819042682647705, + "learning_rate": 1.9719610005785463e-05, + "loss": 0.4309, + "step": 1920 + }, + { + "epoch": 0.7688, + "grad_norm": 7.792922496795654, + "learning_rate": 1.9713005710050206e-05, + "loss": 0.4873, + "step": 1922 + }, + { + "epoch": 0.7696, + "grad_norm": 5.196712493896484, + "learning_rate": 1.9706325670345276e-05, + "loss": 0.3977, + "step": 1924 + }, + { + "epoch": 0.7704, + "grad_norm": 5.214966297149658, + "learning_rate": 1.9699569938762975e-05, + "loss": 0.4326, + "step": 1926 + }, + { + "epoch": 0.7712, + "grad_norm": 8.276177406311035, + "learning_rate": 1.969273856798586e-05, + "loss": 0.446, + "step": 1928 + }, + { + "epoch": 0.772, + "grad_norm": 3.4883368015289307, + "learning_rate": 1.9685831611286312e-05, + "loss": 0.3672, + "step": 1930 + }, + { + "epoch": 0.7728, + "grad_norm": 1.9015693664550781, + "learning_rate": 1.9678849122526195e-05, + "loss": 0.3345, + "step": 1932 + }, + { + "epoch": 0.7736, + "grad_norm": 8.110586166381836, + "learning_rate": 1.967179115615633e-05, + "loss": 0.4249, + "step": 1934 + }, + { + "epoch": 0.7744, + "grad_norm": 4.227611064910889, + "learning_rate": 1.966465776721618e-05, + "loss": 0.6143, + "step": 1936 + }, + { + "epoch": 0.7752, + "grad_norm": 2.341872215270996, + "learning_rate": 1.9657449011333328e-05, + "loss": 0.5497, + "step": 1938 + }, + { + "epoch": 0.776, + "grad_norm": 2.357506513595581, + "learning_rate": 1.965016494472312e-05, + "loss": 0.2292, + "step": 1940 + }, + { + "epoch": 0.7768, + "grad_norm": 3.373274087905884, + "learning_rate": 1.964280562418815e-05, + "loss": 0.2772, + "step": 1942 + }, + { + "epoch": 0.7776, + "grad_norm": 27.803871154785156, + "learning_rate": 1.963537110711789e-05, + "loss": 0.8697, + "step": 1944 + }, + { + "epoch": 0.7784, + "grad_norm": 4.842161655426025, + "learning_rate": 1.9627861451488194e-05, + "loss": 0.4879, + "step": 1946 + }, + { + "epoch": 0.7792, + "grad_norm": 8.67030143737793, + "learning_rate": 1.962027671586086e-05, + "loss": 0.5492, + "step": 1948 + }, + { + "epoch": 0.78, + "grad_norm": 3.379067897796631, + "learning_rate": 1.9612616959383194e-05, + "loss": 0.3028, + "step": 1950 + }, + { + "epoch": 0.7808, + "grad_norm": 3.8166677951812744, + "learning_rate": 1.96048822417875e-05, + "loss": 0.2783, + "step": 1952 + }, + { + "epoch": 0.7816, + "grad_norm": 3.9429714679718018, + "learning_rate": 1.9597072623390668e-05, + "loss": 0.4552, + "step": 1954 + }, + { + "epoch": 0.7824, + "grad_norm": 2.631047487258911, + "learning_rate": 1.9589188165093666e-05, + "loss": 0.3029, + "step": 1956 + }, + { + "epoch": 0.7832, + "grad_norm": 2.698028087615967, + "learning_rate": 1.95812289283811e-05, + "loss": 0.1427, + "step": 1958 + }, + { + "epoch": 0.784, + "grad_norm": 6.577787399291992, + "learning_rate": 
1.9573194975320672e-05, + "loss": 0.565, + "step": 1960 + }, + { + "epoch": 0.7848, + "grad_norm": 2.450500249862671, + "learning_rate": 1.9565086368562784e-05, + "loss": 0.4542, + "step": 1962 + }, + { + "epoch": 0.7856, + "grad_norm": 10.015090942382812, + "learning_rate": 1.9556903171339966e-05, + "loss": 0.3882, + "step": 1964 + }, + { + "epoch": 0.7864, + "grad_norm": 4.287123680114746, + "learning_rate": 1.954864544746643e-05, + "loss": 0.2544, + "step": 1966 + }, + { + "epoch": 0.7872, + "grad_norm": 3.965257406234741, + "learning_rate": 1.9540313261337585e-05, + "loss": 0.4182, + "step": 1968 + }, + { + "epoch": 0.788, + "grad_norm": 5.50913143157959, + "learning_rate": 1.9531906677929472e-05, + "loss": 0.3617, + "step": 1970 + }, + { + "epoch": 0.7888, + "grad_norm": 3.2805440425872803, + "learning_rate": 1.9523425762798335e-05, + "loss": 0.204, + "step": 1972 + }, + { + "epoch": 0.7896, + "grad_norm": 10.288555145263672, + "learning_rate": 1.9514870582080035e-05, + "loss": 0.6548, + "step": 1974 + }, + { + "epoch": 0.7904, + "grad_norm": 5.323610782623291, + "learning_rate": 1.95062412024896e-05, + "loss": 1.676, + "step": 1976 + }, + { + "epoch": 0.7912, + "grad_norm": 12.464410781860352, + "learning_rate": 1.9497537691320667e-05, + "loss": 0.7239, + "step": 1978 + }, + { + "epoch": 0.792, + "grad_norm": 3.5932133197784424, + "learning_rate": 1.948876011644497e-05, + "loss": 0.3674, + "step": 1980 + }, + { + "epoch": 0.7928, + "grad_norm": 3.760422945022583, + "learning_rate": 1.9479908546311787e-05, + "loss": 0.3465, + "step": 1982 + }, + { + "epoch": 0.7936, + "grad_norm": 5.273140907287598, + "learning_rate": 1.9470983049947443e-05, + "loss": 0.5971, + "step": 1984 + }, + { + "epoch": 0.7944, + "grad_norm": 2.991767168045044, + "learning_rate": 1.9461983696954767e-05, + "loss": 0.27, + "step": 1986 + }, + { + "epoch": 0.7952, + "grad_norm": 2.138972759246826, + "learning_rate": 1.9452910557512494e-05, + "loss": 0.2681, + "step": 1988 + }, + { + "epoch": 0.796, + "grad_norm": 8.971880912780762, + "learning_rate": 1.9443763702374818e-05, + "loss": 0.6808, + "step": 1990 + }, + { + "epoch": 0.7968, + "grad_norm": 2.9185991287231445, + "learning_rate": 1.9434543202870726e-05, + "loss": 0.6934, + "step": 1992 + }, + { + "epoch": 0.7976, + "grad_norm": 4.297379493713379, + "learning_rate": 1.9425249130903544e-05, + "loss": 0.3042, + "step": 1994 + }, + { + "epoch": 0.7984, + "grad_norm": 3.2408552169799805, + "learning_rate": 1.94158815589503e-05, + "loss": 0.5953, + "step": 1996 + }, + { + "epoch": 0.7992, + "grad_norm": 8.040743827819824, + "learning_rate": 1.940644056006122e-05, + "loss": 0.8196, + "step": 1998 + }, + { + "epoch": 0.8, + "grad_norm": 7.990960597991943, + "learning_rate": 1.939692620785909e-05, + "loss": 0.4612, + "step": 2000 + }, + { + "epoch": 0.8008, + "grad_norm": 6.383898735046387, + "learning_rate": 1.9387338576538746e-05, + "loss": 0.4105, + "step": 2002 + }, + { + "epoch": 0.8016, + "grad_norm": 6.999117851257324, + "learning_rate": 1.9377677740866464e-05, + "loss": 0.2251, + "step": 2004 + }, + { + "epoch": 0.8024, + "grad_norm": 7.917981147766113, + "learning_rate": 1.936794377617938e-05, + "loss": 0.7872, + "step": 2006 + }, + { + "epoch": 0.8032, + "grad_norm": 5.2557501792907715, + "learning_rate": 1.9358136758384917e-05, + "loss": 0.5837, + "step": 2008 + }, + { + "epoch": 0.804, + "grad_norm": 4.809940338134766, + "learning_rate": 1.9348256763960146e-05, + "loss": 0.6812, + "step": 2010 + }, + { + "epoch": 0.8048, + "grad_norm": 2.842546224594116, 
+ "learning_rate": 1.9338303869951273e-05, + "loss": 0.3981, + "step": 2012 + }, + { + "epoch": 0.8056, + "grad_norm": 3.4043211936950684, + "learning_rate": 1.9328278153972943e-05, + "loss": 0.2863, + "step": 2014 + }, + { + "epoch": 0.8064, + "grad_norm": 2.5935723781585693, + "learning_rate": 1.931817969420773e-05, + "loss": 0.353, + "step": 2016 + }, + { + "epoch": 0.8072, + "grad_norm": 4.572518825531006, + "learning_rate": 1.930800856940543e-05, + "loss": 0.3879, + "step": 2018 + }, + { + "epoch": 0.808, + "grad_norm": 4.014364242553711, + "learning_rate": 1.929776485888252e-05, + "loss": 0.5561, + "step": 2020 + }, + { + "epoch": 0.8088, + "grad_norm": 4.8947906494140625, + "learning_rate": 1.9287448642521517e-05, + "loss": 0.734, + "step": 2022 + }, + { + "epoch": 0.8096, + "grad_norm": 1.952275037765503, + "learning_rate": 1.9277060000770342e-05, + "loss": 0.5817, + "step": 2024 + }, + { + "epoch": 0.8104, + "grad_norm": 4.268755912780762, + "learning_rate": 1.9266599014641727e-05, + "loss": 0.5069, + "step": 2026 + }, + { + "epoch": 0.8112, + "grad_norm": 3.921480655670166, + "learning_rate": 1.925606576571252e-05, + "loss": 0.5792, + "step": 2028 + }, + { + "epoch": 0.812, + "grad_norm": 12.108016967773438, + "learning_rate": 1.9245460336123136e-05, + "loss": 0.6826, + "step": 2030 + }, + { + "epoch": 0.8128, + "grad_norm": 3.33408522605896, + "learning_rate": 1.923478280857682e-05, + "loss": 0.464, + "step": 2032 + }, + { + "epoch": 0.8136, + "grad_norm": 1.6179202795028687, + "learning_rate": 1.9224033266339103e-05, + "loss": 0.2836, + "step": 2034 + }, + { + "epoch": 0.8144, + "grad_norm": 12.353996276855469, + "learning_rate": 1.9213211793237066e-05, + "loss": 0.7539, + "step": 2036 + }, + { + "epoch": 0.8152, + "grad_norm": 6.888246536254883, + "learning_rate": 1.9202318473658707e-05, + "loss": 0.5177, + "step": 2038 + }, + { + "epoch": 0.816, + "grad_norm": 4.788189888000488, + "learning_rate": 1.919135339255235e-05, + "loss": 0.6794, + "step": 2040 + }, + { + "epoch": 0.8168, + "grad_norm": 3.879225492477417, + "learning_rate": 1.918031663542588e-05, + "loss": 0.2968, + "step": 2042 + }, + { + "epoch": 0.8176, + "grad_norm": 2.4591026306152344, + "learning_rate": 1.916920828834617e-05, + "loss": 0.3379, + "step": 2044 + }, + { + "epoch": 0.8184, + "grad_norm": 3.039889335632324, + "learning_rate": 1.9158028437938313e-05, + "loss": 0.4206, + "step": 2046 + }, + { + "epoch": 0.8192, + "grad_norm": 9.00166130065918, + "learning_rate": 1.9146777171385057e-05, + "loss": 0.7119, + "step": 2048 + }, + { + "epoch": 0.82, + "grad_norm": 16.83409309387207, + "learning_rate": 1.913545457642601e-05, + "loss": 0.9266, + "step": 2050 + }, + { + "epoch": 0.8208, + "grad_norm": 8.59817123413086, + "learning_rate": 1.9124060741357065e-05, + "loss": 0.4125, + "step": 2052 + }, + { + "epoch": 0.8216, + "grad_norm": 4.9218525886535645, + "learning_rate": 1.911259575502963e-05, + "loss": 0.8163, + "step": 2054 + }, + { + "epoch": 0.8224, + "grad_norm": 7.36513090133667, + "learning_rate": 1.910105970684996e-05, + "loss": 0.4727, + "step": 2056 + }, + { + "epoch": 0.8232, + "grad_norm": 2.194617986679077, + "learning_rate": 1.908945268677849e-05, + "loss": 0.8017, + "step": 2058 + }, + { + "epoch": 0.824, + "grad_norm": 4.550289154052734, + "learning_rate": 1.9077774785329085e-05, + "loss": 0.3739, + "step": 2060 + }, + { + "epoch": 0.8248, + "grad_norm": 3.4990861415863037, + "learning_rate": 1.9066026093568383e-05, + "loss": 0.3745, + "step": 2062 + }, + { + "epoch": 0.8256, + "grad_norm": 
8.7343111038208, + "learning_rate": 1.9054206703115013e-05, + "loss": 0.8979, + "step": 2064 + }, + { + "epoch": 0.8264, + "grad_norm": 2.711123466491699, + "learning_rate": 1.9042316706138994e-05, + "loss": 0.3762, + "step": 2066 + }, + { + "epoch": 0.8272, + "grad_norm": 1.9720872640609741, + "learning_rate": 1.903035619536087e-05, + "loss": 0.4065, + "step": 2068 + }, + { + "epoch": 0.828, + "grad_norm": 4.787201881408691, + "learning_rate": 1.901832526405114e-05, + "loss": 0.4545, + "step": 2070 + }, + { + "epoch": 0.8288, + "grad_norm": 4.116015434265137, + "learning_rate": 1.9006224006029414e-05, + "loss": 0.3861, + "step": 2072 + }, + { + "epoch": 0.8296, + "grad_norm": 4.5069451332092285, + "learning_rate": 1.899405251566371e-05, + "loss": 0.5742, + "step": 2074 + }, + { + "epoch": 0.8304, + "grad_norm": 5.005687713623047, + "learning_rate": 1.8981810887869797e-05, + "loss": 0.5577, + "step": 2076 + }, + { + "epoch": 0.8312, + "grad_norm": 5.526610374450684, + "learning_rate": 1.8969499218110302e-05, + "loss": 0.4934, + "step": 2078 + }, + { + "epoch": 0.832, + "grad_norm": 2.5126290321350098, + "learning_rate": 1.8957117602394133e-05, + "loss": 0.3066, + "step": 2080 + }, + { + "epoch": 0.8328, + "grad_norm": 9.842791557312012, + "learning_rate": 1.8944666137275596e-05, + "loss": 0.6265, + "step": 2082 + }, + { + "epoch": 0.8336, + "grad_norm": 13.449177742004395, + "learning_rate": 1.8932144919853744e-05, + "loss": 0.7168, + "step": 2084 + }, + { + "epoch": 0.8344, + "grad_norm": 4.880086421966553, + "learning_rate": 1.8919554047771508e-05, + "loss": 0.3834, + "step": 2086 + }, + { + "epoch": 0.8352, + "grad_norm": 5.4077019691467285, + "learning_rate": 1.890689361921507e-05, + "loss": 0.3614, + "step": 2088 + }, + { + "epoch": 0.836, + "grad_norm": 2.8501031398773193, + "learning_rate": 1.8894163732912986e-05, + "loss": 0.4492, + "step": 2090 + }, + { + "epoch": 0.8368, + "grad_norm": 5.37775993347168, + "learning_rate": 1.8881364488135445e-05, + "loss": 0.4278, + "step": 2092 + }, + { + "epoch": 0.8376, + "grad_norm": 2.3796370029449463, + "learning_rate": 1.886849598469357e-05, + "loss": 0.3203, + "step": 2094 + }, + { + "epoch": 0.8384, + "grad_norm": 5.196391582489014, + "learning_rate": 1.8855558322938492e-05, + "loss": 0.3425, + "step": 2096 + }, + { + "epoch": 0.8392, + "grad_norm": 5.295304775238037, + "learning_rate": 1.8842551603760725e-05, + "loss": 0.5215, + "step": 2098 + }, + { + "epoch": 0.84, + "grad_norm": 4.4182562828063965, + "learning_rate": 1.8829475928589265e-05, + "loss": 0.4456, + "step": 2100 + }, + { + "epoch": 0.8408, + "grad_norm": 3.1044559478759766, + "learning_rate": 1.8816331399390874e-05, + "loss": 0.3528, + "step": 2102 + }, + { + "epoch": 0.8416, + "grad_norm": 2.9102487564086914, + "learning_rate": 1.88031181186692e-05, + "loss": 0.34, + "step": 2104 + }, + { + "epoch": 0.8424, + "grad_norm": 6.911310195922852, + "learning_rate": 1.8789836189464092e-05, + "loss": 0.5755, + "step": 2106 + }, + { + "epoch": 0.8432, + "grad_norm": 5.777722358703613, + "learning_rate": 1.877648571535068e-05, + "loss": 0.8788, + "step": 2108 + }, + { + "epoch": 0.844, + "grad_norm": 5.194482326507568, + "learning_rate": 1.8763066800438638e-05, + "loss": 0.6766, + "step": 2110 + }, + { + "epoch": 0.8448, + "grad_norm": 10.82712459564209, + "learning_rate": 1.8749579549371387e-05, + "loss": 0.9228, + "step": 2112 + }, + { + "epoch": 0.8456, + "grad_norm": 4.030005931854248, + "learning_rate": 1.8736024067325188e-05, + "loss": 0.4271, + "step": 2114 + }, + { + 
"epoch": 0.8464, + "grad_norm": 5.070348262786865, + "learning_rate": 1.8722400460008437e-05, + "loss": 0.5131, + "step": 2116 + }, + { + "epoch": 0.8472, + "grad_norm": 3.887665033340454, + "learning_rate": 1.8708708833660748e-05, + "loss": 2.6075, + "step": 2118 + }, + { + "epoch": 0.848, + "grad_norm": 4.675127983093262, + "learning_rate": 1.8694949295052198e-05, + "loss": 0.7133, + "step": 2120 + }, + { + "epoch": 0.8488, + "grad_norm": 4.268080711364746, + "learning_rate": 1.868112195148239e-05, + "loss": 0.5845, + "step": 2122 + }, + { + "epoch": 0.8496, + "grad_norm": 3.9360458850860596, + "learning_rate": 1.866722691077977e-05, + "loss": 0.4167, + "step": 2124 + }, + { + "epoch": 0.8504, + "grad_norm": 20.160083770751953, + "learning_rate": 1.8653264281300626e-05, + "loss": 0.9318, + "step": 2126 + }, + { + "epoch": 0.8512, + "grad_norm": 4.777723789215088, + "learning_rate": 1.8639234171928355e-05, + "loss": 0.4258, + "step": 2128 + }, + { + "epoch": 0.852, + "grad_norm": 3.6808910369873047, + "learning_rate": 1.8625136692072587e-05, + "loss": 0.4202, + "step": 2130 + }, + { + "epoch": 0.8528, + "grad_norm": 8.434818267822266, + "learning_rate": 1.8610971951668265e-05, + "loss": 0.7677, + "step": 2132 + }, + { + "epoch": 0.8536, + "grad_norm": 8.012714385986328, + "learning_rate": 1.8596740061174912e-05, + "loss": 0.5678, + "step": 2134 + }, + { + "epoch": 0.8544, + "grad_norm": 9.045832633972168, + "learning_rate": 1.858244113157566e-05, + "loss": 0.3242, + "step": 2136 + }, + { + "epoch": 0.8552, + "grad_norm": 6.075767993927002, + "learning_rate": 1.8568075274376432e-05, + "loss": 0.5078, + "step": 2138 + }, + { + "epoch": 0.856, + "grad_norm": 3.5789854526519775, + "learning_rate": 1.8553642601605083e-05, + "loss": 0.3583, + "step": 2140 + }, + { + "epoch": 0.8568, + "grad_norm": 3.273469924926758, + "learning_rate": 1.8539143225810457e-05, + "loss": 0.162, + "step": 2142 + }, + { + "epoch": 0.8576, + "grad_norm": 6.589645862579346, + "learning_rate": 1.852457726006163e-05, + "loss": 0.7334, + "step": 2144 + }, + { + "epoch": 0.8584, + "grad_norm": 6.523824691772461, + "learning_rate": 1.8509944817946917e-05, + "loss": 0.4509, + "step": 2146 + }, + { + "epoch": 0.8592, + "grad_norm": 4.779366970062256, + "learning_rate": 1.8495246013573064e-05, + "loss": 0.2784, + "step": 2148 + }, + { + "epoch": 0.86, + "grad_norm": 2.8005824089050293, + "learning_rate": 1.848048096156426e-05, + "loss": 0.3085, + "step": 2150 + }, + { + "epoch": 0.8608, + "grad_norm": 4.007396221160889, + "learning_rate": 1.8465649777061387e-05, + "loss": 0.6944, + "step": 2152 + }, + { + "epoch": 0.8616, + "grad_norm": 7.373164653778076, + "learning_rate": 1.8450752575720967e-05, + "loss": 0.4137, + "step": 2154 + }, + { + "epoch": 0.8624, + "grad_norm": 2.44792103767395, + "learning_rate": 1.843578947371439e-05, + "loss": 0.494, + "step": 2156 + }, + { + "epoch": 0.8632, + "grad_norm": 5.441621780395508, + "learning_rate": 1.8420760587726935e-05, + "loss": 1.3811, + "step": 2158 + }, + { + "epoch": 0.864, + "grad_norm": 14.928495407104492, + "learning_rate": 1.8405666034956846e-05, + "loss": 0.6873, + "step": 2160 + }, + { + "epoch": 0.8648, + "grad_norm": 19.143177032470703, + "learning_rate": 1.8390505933114507e-05, + "loss": 0.9384, + "step": 2162 + }, + { + "epoch": 0.8656, + "grad_norm": 2.426013708114624, + "learning_rate": 1.8375280400421414e-05, + "loss": 0.4808, + "step": 2164 + }, + { + "epoch": 0.8664, + "grad_norm": 4.136850833892822, + "learning_rate": 1.8359989555609365e-05, + "loss": 0.32, + 
"step": 2166 + }, + { + "epoch": 0.8672, + "grad_norm": 7.991090774536133, + "learning_rate": 1.834463351791939e-05, + "loss": 0.7678, + "step": 2168 + }, + { + "epoch": 0.868, + "grad_norm": 4.490537643432617, + "learning_rate": 1.8329212407101006e-05, + "loss": 0.5311, + "step": 2170 + }, + { + "epoch": 0.8688, + "grad_norm": 7.613470554351807, + "learning_rate": 1.8313726343411085e-05, + "loss": 0.4121, + "step": 2172 + }, + { + "epoch": 0.8696, + "grad_norm": 5.197305679321289, + "learning_rate": 1.82981754476131e-05, + "loss": 0.1904, + "step": 2174 + }, + { + "epoch": 0.8704, + "grad_norm": 9.690563201904297, + "learning_rate": 1.8282559840976053e-05, + "loss": 0.4502, + "step": 2176 + }, + { + "epoch": 0.8712, + "grad_norm": 2.421522855758667, + "learning_rate": 1.8266879645273557e-05, + "loss": 0.2495, + "step": 2178 + }, + { + "epoch": 0.872, + "grad_norm": 7.8181328773498535, + "learning_rate": 1.8251134982782966e-05, + "loss": 0.2981, + "step": 2180 + }, + { + "epoch": 0.8728, + "grad_norm": 1.5006805658340454, + "learning_rate": 1.823532597628428e-05, + "loss": 0.3555, + "step": 2182 + }, + { + "epoch": 0.8736, + "grad_norm": 3.146165609359741, + "learning_rate": 1.8219452749059336e-05, + "loss": 0.3498, + "step": 2184 + }, + { + "epoch": 0.8744, + "grad_norm": 2.798628807067871, + "learning_rate": 1.8203515424890734e-05, + "loss": 0.5941, + "step": 2186 + }, + { + "epoch": 0.8752, + "grad_norm": 7.344174861907959, + "learning_rate": 1.8187514128060956e-05, + "loss": 0.8017, + "step": 2188 + }, + { + "epoch": 0.876, + "grad_norm": 1.4742686748504639, + "learning_rate": 1.8171448983351284e-05, + "loss": 0.2445, + "step": 2190 + }, + { + "epoch": 0.8768, + "grad_norm": 3.6886837482452393, + "learning_rate": 1.8155320116040983e-05, + "loss": 0.1941, + "step": 2192 + }, + { + "epoch": 0.8776, + "grad_norm": 10.147557258605957, + "learning_rate": 1.8139127651906193e-05, + "loss": 0.7069, + "step": 2194 + }, + { + "epoch": 0.8784, + "grad_norm": 2.233205556869507, + "learning_rate": 1.8122871717218974e-05, + "loss": 0.438, + "step": 2196 + }, + { + "epoch": 0.8792, + "grad_norm": 5.252257823944092, + "learning_rate": 1.8106552438746413e-05, + "loss": 0.7512, + "step": 2198 + }, + { + "epoch": 0.88, + "grad_norm": 8.280484199523926, + "learning_rate": 1.8090169943749477e-05, + "loss": 1.0446, + "step": 2200 + }, + { + "epoch": 0.8808, + "grad_norm": 8.377373695373535, + "learning_rate": 1.807372435998219e-05, + "loss": 0.4684, + "step": 2202 + }, + { + "epoch": 0.8816, + "grad_norm": 4.66433048248291, + "learning_rate": 1.8057215815690487e-05, + "loss": 0.3269, + "step": 2204 + }, + { + "epoch": 0.8824, + "grad_norm": 2.217611312866211, + "learning_rate": 1.8040644439611355e-05, + "loss": 0.5373, + "step": 2206 + }, + { + "epoch": 0.8832, + "grad_norm": 5.229759216308594, + "learning_rate": 1.8024010360971665e-05, + "loss": 0.3007, + "step": 2208 + }, + { + "epoch": 0.884, + "grad_norm": 5.907240390777588, + "learning_rate": 1.8007313709487345e-05, + "loss": 0.4164, + "step": 2210 + }, + { + "epoch": 0.8848, + "grad_norm": 6.11362886428833, + "learning_rate": 1.7990554615362207e-05, + "loss": 0.2909, + "step": 2212 + }, + { + "epoch": 0.8856, + "grad_norm": 2.218219757080078, + "learning_rate": 1.7973733209287036e-05, + "loss": 0.3034, + "step": 2214 + }, + { + "epoch": 0.8864, + "grad_norm": 2.290370464324951, + "learning_rate": 1.7956849622438568e-05, + "loss": 0.2474, + "step": 2216 + }, + { + "epoch": 0.8872, + "grad_norm": 3.0256340503692627, + "learning_rate": 
1.7939903986478357e-05, + "loss": 0.5, + "step": 2218 + }, + { + "epoch": 0.888, + "grad_norm": 7.33586311340332, + "learning_rate": 1.7922896433551913e-05, + "loss": 0.7149, + "step": 2220 + }, + { + "epoch": 0.8888, + "grad_norm": 2.595695734024048, + "learning_rate": 1.7905827096287525e-05, + "loss": 0.24, + "step": 2222 + }, + { + "epoch": 0.8896, + "grad_norm": 4.993026256561279, + "learning_rate": 1.7888696107795347e-05, + "loss": 0.4084, + "step": 2224 + }, + { + "epoch": 0.8904, + "grad_norm": 13.632013320922852, + "learning_rate": 1.787150360166623e-05, + "loss": 0.5648, + "step": 2226 + }, + { + "epoch": 0.8912, + "grad_norm": 4.771726608276367, + "learning_rate": 1.7854249711970826e-05, + "loss": 0.3186, + "step": 2228 + }, + { + "epoch": 0.892, + "grad_norm": 1.7299787998199463, + "learning_rate": 1.783693457325841e-05, + "loss": 0.1849, + "step": 2230 + }, + { + "epoch": 0.8928, + "grad_norm": 5.569767951965332, + "learning_rate": 1.7819558320555902e-05, + "loss": 0.5359, + "step": 2232 + }, + { + "epoch": 0.8936, + "grad_norm": 2.1846365928649902, + "learning_rate": 1.780212108936685e-05, + "loss": 0.2436, + "step": 2234 + }, + { + "epoch": 0.8944, + "grad_norm": 5.118314266204834, + "learning_rate": 1.7784623015670237e-05, + "loss": 0.4241, + "step": 2236 + }, + { + "epoch": 0.8952, + "grad_norm": 8.755192756652832, + "learning_rate": 1.7767064235919594e-05, + "loss": 0.5717, + "step": 2238 + }, + { + "epoch": 0.896, + "grad_norm": 8.251045227050781, + "learning_rate": 1.77494448870418e-05, + "loss": 0.6775, + "step": 2240 + }, + { + "epoch": 0.8968, + "grad_norm": 2.6442363262176514, + "learning_rate": 1.773176510643608e-05, + "loss": 0.3537, + "step": 2242 + }, + { + "epoch": 0.8976, + "grad_norm": 2.8393073081970215, + "learning_rate": 1.7714025031972894e-05, + "loss": 0.3711, + "step": 2244 + }, + { + "epoch": 0.8984, + "grad_norm": 4.766880035400391, + "learning_rate": 1.769622480199295e-05, + "loss": 0.3779, + "step": 2246 + }, + { + "epoch": 0.8992, + "grad_norm": 10.22143840789795, + "learning_rate": 1.7678364555305982e-05, + "loss": 0.5814, + "step": 2248 + }, + { + "epoch": 0.9, + "grad_norm": 9.3465576171875, + "learning_rate": 1.7660444431189777e-05, + "loss": 0.4841, + "step": 2250 + }, + { + "epoch": 0.9008, + "grad_norm": 2.9356963634490967, + "learning_rate": 1.76424645693891e-05, + "loss": 0.4369, + "step": 2252 + }, + { + "epoch": 0.9016, + "grad_norm": 6.478868007659912, + "learning_rate": 1.762442511011448e-05, + "loss": 0.6658, + "step": 2254 + }, + { + "epoch": 0.9024, + "grad_norm": 4.515495777130127, + "learning_rate": 1.7606326194041285e-05, + "loss": 0.2635, + "step": 2256 + }, + { + "epoch": 0.9032, + "grad_norm": 3.4764370918273926, + "learning_rate": 1.7588167962308458e-05, + "loss": 0.3208, + "step": 2258 + }, + { + "epoch": 0.904, + "grad_norm": 4.561454772949219, + "learning_rate": 1.756995055651757e-05, + "loss": 0.5108, + "step": 2260 + }, + { + "epoch": 0.9048, + "grad_norm": 4.654769420623779, + "learning_rate": 1.7551674118731585e-05, + "loss": 0.3701, + "step": 2262 + }, + { + "epoch": 0.9056, + "grad_norm": 10.341426849365234, + "learning_rate": 1.7533338791473875e-05, + "loss": 0.6947, + "step": 2264 + }, + { + "epoch": 0.9064, + "grad_norm": 7.0824809074401855, + "learning_rate": 1.751494471772697e-05, + "loss": 0.4752, + "step": 2266 + }, + { + "epoch": 0.9072, + "grad_norm": 6.04065465927124, + "learning_rate": 1.7496492040931548e-05, + "loss": 0.3313, + "step": 2268 + }, + { + "epoch": 0.908, + "grad_norm": 3.093017578125, + 
"learning_rate": 1.747798090498533e-05, + "loss": 0.2161, + "step": 2270 + }, + { + "epoch": 0.9088, + "grad_norm": 3.0049962997436523, + "learning_rate": 1.745941145424182e-05, + "loss": 0.6934, + "step": 2272 + }, + { + "epoch": 0.9096, + "grad_norm": 2.6591107845306396, + "learning_rate": 1.744078383350938e-05, + "loss": 0.3383, + "step": 2274 + }, + { + "epoch": 0.9104, + "grad_norm": 9.022825241088867, + "learning_rate": 1.7422098188049885e-05, + "loss": 0.5028, + "step": 2276 + }, + { + "epoch": 0.9112, + "grad_norm": 7.651523590087891, + "learning_rate": 1.7403354663577782e-05, + "loss": 0.8057, + "step": 2278 + }, + { + "epoch": 0.912, + "grad_norm": 5.598511219024658, + "learning_rate": 1.738455340625883e-05, + "loss": 0.7146, + "step": 2280 + }, + { + "epoch": 0.9128, + "grad_norm": 1.7681468725204468, + "learning_rate": 1.7365694562709038e-05, + "loss": 0.5604, + "step": 2282 + }, + { + "epoch": 0.9136, + "grad_norm": 4.347034454345703, + "learning_rate": 1.7346778279993433e-05, + "loss": 0.354, + "step": 2284 + }, + { + "epoch": 0.9144, + "grad_norm": 4.9559149742126465, + "learning_rate": 1.7327804705624962e-05, + "loss": 0.404, + "step": 2286 + }, + { + "epoch": 0.9152, + "grad_norm": 2.3942646980285645, + "learning_rate": 1.730877398756341e-05, + "loss": 0.6259, + "step": 2288 + }, + { + "epoch": 0.916, + "grad_norm": 6.6393351554870605, + "learning_rate": 1.7289686274214113e-05, + "loss": 0.3538, + "step": 2290 + }, + { + "epoch": 0.9168, + "grad_norm": 3.790372610092163, + "learning_rate": 1.727054171442693e-05, + "loss": 0.3411, + "step": 2292 + }, + { + "epoch": 0.9176, + "grad_norm": 4.417578220367432, + "learning_rate": 1.7251340457494934e-05, + "loss": 0.3735, + "step": 2294 + }, + { + "epoch": 0.9184, + "grad_norm": 2.050532579421997, + "learning_rate": 1.7232082653153422e-05, + "loss": 0.2166, + "step": 2296 + }, + { + "epoch": 0.9192, + "grad_norm": 4.665719032287598, + "learning_rate": 1.7212768451578595e-05, + "loss": 0.2797, + "step": 2298 + }, + { + "epoch": 0.92, + "grad_norm": 3.98673415184021, + "learning_rate": 1.7193398003386517e-05, + "loss": 0.3617, + "step": 2300 + }, + { + "epoch": 0.9208, + "grad_norm": 3.666666030883789, + "learning_rate": 1.7173971459631803e-05, + "loss": 0.5871, + "step": 2302 + }, + { + "epoch": 0.9216, + "grad_norm": 4.225686550140381, + "learning_rate": 1.7154488971806525e-05, + "loss": 0.3627, + "step": 2304 + }, + { + "epoch": 0.9224, + "grad_norm": 2.3536264896392822, + "learning_rate": 1.713495069183907e-05, + "loss": 0.3145, + "step": 2306 + }, + { + "epoch": 0.9232, + "grad_norm": 2.9840025901794434, + "learning_rate": 1.7115356772092847e-05, + "loss": 0.5521, + "step": 2308 + }, + { + "epoch": 0.924, + "grad_norm": 3.0804269313812256, + "learning_rate": 1.709570736536522e-05, + "loss": 0.4238, + "step": 2310 + }, + { + "epoch": 0.9248, + "grad_norm": 2.455007791519165, + "learning_rate": 1.7076002624886152e-05, + "loss": 0.2472, + "step": 2312 + }, + { + "epoch": 0.9256, + "grad_norm": 7.137457847595215, + "learning_rate": 1.705624270431722e-05, + "loss": 0.6622, + "step": 2314 + }, + { + "epoch": 0.9264, + "grad_norm": 1.5967859029769897, + "learning_rate": 1.70364277577502e-05, + "loss": 0.1157, + "step": 2316 + }, + { + "epoch": 0.9272, + "grad_norm": 4.234429836273193, + "learning_rate": 1.7016557939706078e-05, + "loss": 0.3578, + "step": 2318 + }, + { + "epoch": 0.928, + "grad_norm": 4.573144435882568, + "learning_rate": 1.6996633405133673e-05, + "loss": 0.288, + "step": 2320 + }, + { + "epoch": 0.9288, + 
"grad_norm": 10.306396484375, + "learning_rate": 1.6976654309408468e-05, + "loss": 0.6993, + "step": 2322 + }, + { + "epoch": 0.9296, + "grad_norm": 10.781827926635742, + "learning_rate": 1.6956620808331515e-05, + "loss": 0.735, + "step": 2324 + }, + { + "epoch": 0.9304, + "grad_norm": 3.1664340496063232, + "learning_rate": 1.6936533058128042e-05, + "loss": 0.4528, + "step": 2326 + }, + { + "epoch": 0.9312, + "grad_norm": 7.0673298835754395, + "learning_rate": 1.691639121544641e-05, + "loss": 0.3242, + "step": 2328 + }, + { + "epoch": 0.932, + "grad_norm": 8.26816463470459, + "learning_rate": 1.6896195437356696e-05, + "loss": 0.6315, + "step": 2330 + }, + { + "epoch": 0.9328, + "grad_norm": 4.574904441833496, + "learning_rate": 1.6875945881349686e-05, + "loss": 0.4652, + "step": 2332 + }, + { + "epoch": 0.9336, + "grad_norm": 4.3775506019592285, + "learning_rate": 1.6855642705335435e-05, + "loss": 0.4898, + "step": 2334 + }, + { + "epoch": 0.9344, + "grad_norm": 4.384169578552246, + "learning_rate": 1.6835286067642228e-05, + "loss": 0.7099, + "step": 2336 + }, + { + "epoch": 0.9352, + "grad_norm": 4.223312854766846, + "learning_rate": 1.681487612701521e-05, + "loss": 0.5002, + "step": 2338 + }, + { + "epoch": 0.936, + "grad_norm": 5.141642093658447, + "learning_rate": 1.6794413042615168e-05, + "loss": 0.4201, + "step": 2340 + }, + { + "epoch": 0.9368, + "grad_norm": 4.047835350036621, + "learning_rate": 1.677389697401739e-05, + "loss": 0.578, + "step": 2342 + }, + { + "epoch": 0.9376, + "grad_norm": 2.148810863494873, + "learning_rate": 1.675332808121025e-05, + "loss": 0.2867, + "step": 2344 + }, + { + "epoch": 0.9384, + "grad_norm": 6.967923164367676, + "learning_rate": 1.6732706524594145e-05, + "loss": 0.8902, + "step": 2346 + }, + { + "epoch": 0.9392, + "grad_norm": 9.913086891174316, + "learning_rate": 1.671203246498009e-05, + "loss": 0.4933, + "step": 2348 + }, + { + "epoch": 0.94, + "grad_norm": 3.1417829990386963, + "learning_rate": 1.6691306063588593e-05, + "loss": 0.3506, + "step": 2350 + }, + { + "epoch": 0.9408, + "grad_norm": 14.28138256072998, + "learning_rate": 1.6670527482048242e-05, + "loss": 1.1406, + "step": 2352 + }, + { + "epoch": 0.9416, + "grad_norm": 2.6398777961730957, + "learning_rate": 1.6649696882394635e-05, + "loss": 0.2026, + "step": 2354 + }, + { + "epoch": 0.9424, + "grad_norm": 3.2635467052459717, + "learning_rate": 1.6628814427068968e-05, + "loss": 0.7803, + "step": 2356 + }, + { + "epoch": 0.9432, + "grad_norm": 2.1623849868774414, + "learning_rate": 1.6607880278916778e-05, + "loss": 0.2551, + "step": 2358 + }, + { + "epoch": 0.944, + "grad_norm": 2.4618899822235107, + "learning_rate": 1.6586894601186824e-05, + "loss": 0.4137, + "step": 2360 + }, + { + "epoch": 0.9448, + "grad_norm": 2.293994903564453, + "learning_rate": 1.656585755752957e-05, + "loss": 0.2418, + "step": 2362 + }, + { + "epoch": 0.9456, + "grad_norm": 2.1502673625946045, + "learning_rate": 1.6544769311996153e-05, + "loss": 0.3008, + "step": 2364 + }, + { + "epoch": 0.9464, + "grad_norm": 2.998347520828247, + "learning_rate": 1.6523630029036924e-05, + "loss": 0.4707, + "step": 2366 + }, + { + "epoch": 0.9472, + "grad_norm": 5.802120208740234, + "learning_rate": 1.6502439873500294e-05, + "loss": 0.4726, + "step": 2368 + }, + { + "epoch": 0.948, + "grad_norm": 11.09598159790039, + "learning_rate": 1.6481199010631305e-05, + "loss": 0.5139, + "step": 2370 + }, + { + "epoch": 0.9488, + "grad_norm": 5.688537120819092, + "learning_rate": 1.645990760607052e-05, + "loss": 0.3321, + "step": 2372 + }, 
+ { + "epoch": 0.9496, + "grad_norm": 1.9983233213424683, + "learning_rate": 1.643856582585255e-05, + "loss": 0.394, + "step": 2374 + }, + { + "epoch": 0.9504, + "grad_norm": 6.963872909545898, + "learning_rate": 1.641717383640488e-05, + "loss": 0.3209, + "step": 2376 + }, + { + "epoch": 0.9512, + "grad_norm": 2.2653205394744873, + "learning_rate": 1.6395731804546596e-05, + "loss": 0.1529, + "step": 2378 + }, + { + "epoch": 0.952, + "grad_norm": 3.29524564743042, + "learning_rate": 1.63742398974869e-05, + "loss": 0.3794, + "step": 2380 + }, + { + "epoch": 0.9528, + "grad_norm": 2.852288007736206, + "learning_rate": 1.6352698282824045e-05, + "loss": 0.2458, + "step": 2382 + }, + { + "epoch": 0.9536, + "grad_norm": 2.9293675422668457, + "learning_rate": 1.633110712854385e-05, + "loss": 0.3333, + "step": 2384 + }, + { + "epoch": 0.9544, + "grad_norm": 10.415287017822266, + "learning_rate": 1.6309466603018504e-05, + "loss": 0.5087, + "step": 2386 + }, + { + "epoch": 0.9552, + "grad_norm": 1.27485191822052, + "learning_rate": 1.6287776875005148e-05, + "loss": 0.2743, + "step": 2388 + }, + { + "epoch": 0.956, + "grad_norm": 2.6245243549346924, + "learning_rate": 1.6266038113644612e-05, + "loss": 0.516, + "step": 2390 + }, + { + "epoch": 0.9568, + "grad_norm": 4.452570915222168, + "learning_rate": 1.624425048846017e-05, + "loss": 0.4682, + "step": 2392 + }, + { + "epoch": 0.9576, + "grad_norm": 2.626039743423462, + "learning_rate": 1.6222414169356063e-05, + "loss": 0.2863, + "step": 2394 + }, + { + "epoch": 0.9584, + "grad_norm": 2.5585098266601562, + "learning_rate": 1.6200529326616343e-05, + "loss": 0.4538, + "step": 2396 + }, + { + "epoch": 0.9592, + "grad_norm": 5.5422539710998535, + "learning_rate": 1.6178596130903345e-05, + "loss": 0.4687, + "step": 2398 + }, + { + "epoch": 0.96, + "grad_norm": 8.677401542663574, + "learning_rate": 1.6156614753256587e-05, + "loss": 0.2982, + "step": 2400 + }, + { + "epoch": 0.9608, + "grad_norm": 3.8012046813964844, + "learning_rate": 1.613458536509123e-05, + "loss": 0.2614, + "step": 2402 + }, + { + "epoch": 0.9616, + "grad_norm": 2.2905969619750977, + "learning_rate": 1.6112508138196922e-05, + "loss": 0.7023, + "step": 2404 + }, + { + "epoch": 0.9624, + "grad_norm": 2.1353418827056885, + "learning_rate": 1.6090383244736277e-05, + "loss": 0.423, + "step": 2406 + }, + { + "epoch": 0.9632, + "grad_norm": 1.8343054056167603, + "learning_rate": 1.606821085724363e-05, + "loss": 0.4421, + "step": 2408 + }, + { + "epoch": 0.964, + "grad_norm": 2.907493829727173, + "learning_rate": 1.6045991148623756e-05, + "loss": 0.3788, + "step": 2410 + }, + { + "epoch": 0.9648, + "grad_norm": 7.708912372589111, + "learning_rate": 1.602372429215038e-05, + "loss": 0.556, + "step": 2412 + }, + { + "epoch": 0.9656, + "grad_norm": 2.3259804248809814, + "learning_rate": 1.600141046146497e-05, + "loss": 0.2895, + "step": 2414 + }, + { + "epoch": 0.9664, + "grad_norm": 4.717739105224609, + "learning_rate": 1.597904983057519e-05, + "loss": 0.8317, + "step": 2416 + }, + { + "epoch": 0.9672, + "grad_norm": 3.4694042205810547, + "learning_rate": 1.5956642573853794e-05, + "loss": 0.2237, + "step": 2418 + }, + { + "epoch": 0.968, + "grad_norm": 6.2816033363342285, + "learning_rate": 1.5934188866037014e-05, + "loss": 0.2499, + "step": 2420 + }, + { + "epoch": 0.9688, + "grad_norm": 3.9102213382720947, + "learning_rate": 1.591168888222342e-05, + "loss": 0.4237, + "step": 2422 + }, + { + "epoch": 0.9696, + "grad_norm": 9.37143611907959, + "learning_rate": 1.5889142797872407e-05, + "loss": 
0.5436, + "step": 2424 + }, + { + "epoch": 0.9704, + "grad_norm": 2.8910293579101562, + "learning_rate": 1.5866550788802818e-05, + "loss": 0.3519, + "step": 2426 + }, + { + "epoch": 0.9712, + "grad_norm": 3.6150565147399902, + "learning_rate": 1.584391303119173e-05, + "loss": 0.1368, + "step": 2428 + }, + { + "epoch": 0.972, + "grad_norm": 7.551123142242432, + "learning_rate": 1.582122970157289e-05, + "loss": 0.4774, + "step": 2430 + }, + { + "epoch": 0.9728, + "grad_norm": 8.42343807220459, + "learning_rate": 1.5798500976835503e-05, + "loss": 0.7228, + "step": 2432 + }, + { + "epoch": 0.9736, + "grad_norm": 5.844367027282715, + "learning_rate": 1.577572703422267e-05, + "loss": 0.3905, + "step": 2434 + }, + { + "epoch": 0.9744, + "grad_norm": 4.236822605133057, + "learning_rate": 1.575290805133024e-05, + "loss": 0.5367, + "step": 2436 + }, + { + "epoch": 0.9752, + "grad_norm": 5.707516670227051, + "learning_rate": 1.5730044206105156e-05, + "loss": 1.0149, + "step": 2438 + }, + { + "epoch": 0.976, + "grad_norm": 11.354463577270508, + "learning_rate": 1.570713567684432e-05, + "loss": 0.8809, + "step": 2440 + }, + { + "epoch": 0.9768, + "grad_norm": 4.4314446449279785, + "learning_rate": 1.5684182642193047e-05, + "loss": 0.7045, + "step": 2442 + }, + { + "epoch": 0.9776, + "grad_norm": 2.725847005844116, + "learning_rate": 1.566118528114367e-05, + "loss": 0.2321, + "step": 2444 + }, + { + "epoch": 0.9784, + "grad_norm": 3.7199370861053467, + "learning_rate": 1.563814377303429e-05, + "loss": 0.4067, + "step": 2446 + }, + { + "epoch": 0.9792, + "grad_norm": 3.164414167404175, + "learning_rate": 1.561505829754715e-05, + "loss": 0.3257, + "step": 2448 + }, + { + "epoch": 0.98, + "grad_norm": 2.3418188095092773, + "learning_rate": 1.5591929034707475e-05, + "loss": 0.4852, + "step": 2450 + }, + { + "epoch": 0.9808, + "grad_norm": 2.900113582611084, + "learning_rate": 1.5568756164881874e-05, + "loss": 0.3542, + "step": 2452 + }, + { + "epoch": 0.9816, + "grad_norm": 1.9147764444351196, + "learning_rate": 1.5545539868777085e-05, + "loss": 0.2348, + "step": 2454 + }, + { + "epoch": 0.9824, + "grad_norm": 4.320040702819824, + "learning_rate": 1.5522280327438384e-05, + "loss": 0.5722, + "step": 2456 + }, + { + "epoch": 0.9832, + "grad_norm": 2.6387248039245605, + "learning_rate": 1.5498977722248398e-05, + "loss": 0.3449, + "step": 2458 + }, + { + "epoch": 0.984, + "grad_norm": 1.3307088613510132, + "learning_rate": 1.547563223492552e-05, + "loss": 0.3057, + "step": 2460 + }, + { + "epoch": 0.9848, + "grad_norm": 3.4244158267974854, + "learning_rate": 1.5452244047522504e-05, + "loss": 0.4097, + "step": 2462 + }, + { + "epoch": 0.9856, + "grad_norm": 3.6758108139038086, + "learning_rate": 1.5428813342425194e-05, + "loss": 0.4778, + "step": 2464 + }, + { + "epoch": 0.9864, + "grad_norm": 26.934717178344727, + "learning_rate": 1.5405340302350876e-05, + "loss": 1.4515, + "step": 2466 + }, + { + "epoch": 0.9872, + "grad_norm": 5.977929592132568, + "learning_rate": 1.538182511034708e-05, + "loss": 0.4473, + "step": 2468 + }, + { + "epoch": 0.988, + "grad_norm": 1.2147676944732666, + "learning_rate": 1.535826794978996e-05, + "loss": 0.3552, + "step": 2470 + }, + { + "epoch": 0.9888, + "grad_norm": 5.006214618682861, + "learning_rate": 1.5334669004383036e-05, + "loss": 0.9009, + "step": 2472 + }, + { + "epoch": 0.9896, + "grad_norm": 8.950480461120605, + "learning_rate": 1.5311028458155564e-05, + "loss": 0.9034, + "step": 2474 + }, + { + "epoch": 0.9904, + "grad_norm": 3.063087224960327, + "learning_rate": 
1.528734649546133e-05, + "loss": 0.4858, + "step": 2476 + }, + { + "epoch": 0.9912, + "grad_norm": 9.52176284790039, + "learning_rate": 1.5263623300976997e-05, + "loss": 0.4892, + "step": 2478 + }, + { + "epoch": 0.992, + "grad_norm": 1.4161584377288818, + "learning_rate": 1.5239859059700792e-05, + "loss": 0.2002, + "step": 2480 + }, + { + "epoch": 0.9928, + "grad_norm": 5.208809852600098, + "learning_rate": 1.5216053956951096e-05, + "loss": 0.5075, + "step": 2482 + }, + { + "epoch": 0.9936, + "grad_norm": 5.600852966308594, + "learning_rate": 1.5192208178364819e-05, + "loss": 0.5732, + "step": 2484 + }, + { + "epoch": 0.9944, + "grad_norm": 3.0066847801208496, + "learning_rate": 1.5168321909896176e-05, + "loss": 0.331, + "step": 2486 + }, + { + "epoch": 0.9952, + "grad_norm": 3.0223422050476074, + "learning_rate": 1.5144395337815057e-05, + "loss": 0.2142, + "step": 2488 + }, + { + "epoch": 0.996, + "grad_norm": 2.803382635116577, + "learning_rate": 1.5120428648705722e-05, + "loss": 0.2565, + "step": 2490 + }, + { + "epoch": 0.9968, + "grad_norm": 7.8674445152282715, + "learning_rate": 1.5096422029465171e-05, + "loss": 0.6374, + "step": 2492 + }, + { + "epoch": 0.9976, + "grad_norm": 5.142702579498291, + "learning_rate": 1.5072375667301904e-05, + "loss": 0.414, + "step": 2494 + }, + { + "epoch": 0.9984, + "grad_norm": 5.461601734161377, + "learning_rate": 1.5048289749734231e-05, + "loss": 0.4925, + "step": 2496 + }, + { + "epoch": 0.9992, + "grad_norm": 7.165497303009033, + "learning_rate": 1.502416446458898e-05, + "loss": 0.496, + "step": 2498 + }, + { + "epoch": 1.0, + "grad_norm": 3.7376537322998047, + "learning_rate": 1.5000000000000014e-05, + "loss": 0.3629, + "step": 2500 + }, + { + "epoch": 1.0, + "step": 2500, + "total_flos": 1.0020465781768192e+16, + "train_loss": 0.5193727528691292, + "train_runtime": 17799.0301, + "train_samples_per_second": 2.247, + "train_steps_per_second": 0.14 + } + ], + "logging_steps": 2, + "max_steps": 2500, + "num_input_tokens_seen": 0, + "num_train_epochs": 1, + "save_steps": 500, + "stateful_callbacks": {}, + "total_flos": 1.0020465781768192e+16, + "train_batch_size": 1, + "trial_name": null, + "trial_params": null +} diff --git a/client_states_v9_NEURIPS_DISJOINT_Memonly_LORA_llava_lr2e-5_bs1_gradacc32_iter0_25_gradnorm_scenario18_new_10000_random0_0625_seed1/server_model_round0.pth b/client_states_v9_NEURIPS_DISJOINT_Memonly_LORA_llava_lr2e-5_bs1_gradacc32_iter0_25_gradnorm_scenario18_new_10000_random0_0625_seed1/server_model_round0.pth new file mode 100644 index 0000000000000000000000000000000000000000..0f5bc49911711b3fc42970112cdf1d9bb1d4315a --- /dev/null +++ b/client_states_v9_NEURIPS_DISJOINT_Memonly_LORA_llava_lr2e-5_bs1_gradacc32_iter0_25_gradnorm_scenario18_new_10000_random0_0625_seed1/server_model_round0.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a0b639778d4e1f7aae966d0b330009ecbda922dd8869a6613e0eda2954c0915b +size 639793378 diff --git a/client_states_v9_NEURIPS_DISJOINT_Memonly_LORA_llava_lr2e-5_bs1_gradacc32_iter0_25_gradnorm_scenario18_new_10000_random0_0625_seed1/server_model_round1.pth b/client_states_v9_NEURIPS_DISJOINT_Memonly_LORA_llava_lr2e-5_bs1_gradacc32_iter0_25_gradnorm_scenario18_new_10000_random0_0625_seed1/server_model_round1.pth new file mode 100644 index 0000000000000000000000000000000000000000..b511825b8b9e3b1dac247f297e0874c999b89776 --- /dev/null +++ 
b/client_states_v9_NEURIPS_DISJOINT_Memonly_LORA_llava_lr2e-5_bs1_gradacc32_iter0_25_gradnorm_scenario18_new_10000_random0_0625_seed1/server_model_round1.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e08f705eab7c3e575cee2a2e93a8f7e06e7514bce97e6eb4353b7448c6472262 +size 639793378 diff --git a/client_states_v9_NEURIPS_DISJOINT_Memonly_LORA_llava_lr2e-5_bs1_gradacc32_iter0_25_gradnorm_scenario18_new_10000_random0_0625_seed1/server_model_round2.pth b/client_states_v9_NEURIPS_DISJOINT_Memonly_LORA_llava_lr2e-5_bs1_gradacc32_iter0_25_gradnorm_scenario18_new_10000_random0_0625_seed1/server_model_round2.pth new file mode 100644 index 0000000000000000000000000000000000000000..9a820d6dc33485218ef15ad29b849dcc17e3924c --- /dev/null +++ b/client_states_v9_NEURIPS_DISJOINT_Memonly_LORA_llava_lr2e-5_bs1_gradacc32_iter0_25_gradnorm_scenario18_new_10000_random0_0625_seed1/server_model_round2.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e535c21196e01af3ba8bd5b2d6cad8060cb4f93528c909e6610c444f3678e52b +size 639793378 diff --git a/client_states_v9_NEURIPS_DISJOINT_Memonly_LORA_llava_lr2e-5_bs1_gradacc32_iter0_25_gradnorm_scenario18_new_10000_random0_0625_seed1/server_model_round3.pth b/client_states_v9_NEURIPS_DISJOINT_Memonly_LORA_llava_lr2e-5_bs1_gradacc32_iter0_25_gradnorm_scenario18_new_10000_random0_0625_seed1/server_model_round3.pth new file mode 100644 index 0000000000000000000000000000000000000000..f4ac6d9874913fa17651737eacc2204b1e2ee761 --- /dev/null +++ b/client_states_v9_NEURIPS_DISJOINT_Memonly_LORA_llava_lr2e-5_bs1_gradacc32_iter0_25_gradnorm_scenario18_new_10000_random0_0625_seed1/server_model_round3.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bde55ce76a5fd1470d6214ecfa14613869e50b34f417538b17eae2e496c9f7c6 +size 639793378
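The 0_trainer_state.json added above stores the full per-step log under "log_history", with the closing summary entry carrying the aggregate statistics (train_loss, train_runtime, total_flos). Below is a minimal sketch, not part of the commit, for inspecting that file; it assumes Python with matplotlib installed and that the relative path is resolved from the repository root.

# Sketch: load the committed Trainer state and plot the logged loss and learning-rate curves.
# Assumptions (not from the commit itself): Python 3, matplotlib available, run from the repo root.
import json

import matplotlib.pyplot as plt

STATE_PATH = (
    "client_states_v9_NEURIPS_DISJOINT_Memonly_LORA_llava_lr2e-5_bs1_gradacc32_"
    "iter0_25_gradnorm_scenario18_new_10000_random0_0625_seed1/0_trainer_state.json"
)

with open(STATE_PATH) as f:
    state = json.load(f)

# Intermediate entries carry "loss"; the final summary entry does not, so filter on that key.
entries = [e for e in state["log_history"] if "loss" in e and "learning_rate" in e]
steps = [e["step"] for e in entries]
losses = [e["loss"] for e in entries]
lrs = [e["learning_rate"] for e in entries]

fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True, figsize=(8, 6))
ax_loss.plot(steps, losses)
ax_loss.set_ylabel("training loss")
ax_lr.plot(steps, lrs)
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("global step")
fig.tight_layout()
plt.show()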