diff --git "a/trainer_state.json" "b/trainer_state.json" new file mode 100644--- /dev/null +++ "b/trainer_state.json" @@ -0,0 +1,10185 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 2.9979310344827588, + "eval_steps": 500, + "global_step": 1449, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0020689655172413794, + "grad_norm": 8.16248893737793, + "learning_rate": 6.896551724137931e-08, + "loss": 1.3047, + "step": 1 + }, + { + "epoch": 0.004137931034482759, + "grad_norm": 8.501221656799316, + "learning_rate": 1.3793103448275863e-07, + "loss": 1.3364, + "step": 2 + }, + { + "epoch": 0.006206896551724138, + "grad_norm": 8.676358222961426, + "learning_rate": 2.0689655172413796e-07, + "loss": 1.3572, + "step": 3 + }, + { + "epoch": 0.008275862068965517, + "grad_norm": 8.630147933959961, + "learning_rate": 2.7586206896551726e-07, + "loss": 1.3387, + "step": 4 + }, + { + "epoch": 0.010344827586206896, + "grad_norm": 8.223169326782227, + "learning_rate": 3.4482758620689656e-07, + "loss": 1.3156, + "step": 5 + }, + { + "epoch": 0.012413793103448275, + "grad_norm": 8.330610275268555, + "learning_rate": 4.137931034482759e-07, + "loss": 1.2883, + "step": 6 + }, + { + "epoch": 0.014482758620689656, + "grad_norm": 9.017132759094238, + "learning_rate": 4.827586206896552e-07, + "loss": 1.3781, + "step": 7 + }, + { + "epoch": 0.016551724137931035, + "grad_norm": 8.214015007019043, + "learning_rate": 5.517241379310345e-07, + "loss": 1.2708, + "step": 8 + }, + { + "epoch": 0.018620689655172412, + "grad_norm": 8.30260181427002, + "learning_rate": 6.206896551724139e-07, + "loss": 1.327, + "step": 9 + }, + { + "epoch": 0.020689655172413793, + "grad_norm": 7.864175319671631, + "learning_rate": 6.896551724137931e-07, + "loss": 1.2873, + "step": 10 + }, + { + "epoch": 0.022758620689655173, + "grad_norm": 7.549825191497803, + "learning_rate": 7.586206896551725e-07, + "loss": 1.2672, + "step": 11 + }, + { + "epoch": 0.02482758620689655, + "grad_norm": 7.3428802490234375, + "learning_rate": 8.275862068965518e-07, + "loss": 1.2312, + "step": 12 + }, + { + "epoch": 0.02689655172413793, + "grad_norm": 6.168758392333984, + "learning_rate": 8.965517241379311e-07, + "loss": 1.2331, + "step": 13 + }, + { + "epoch": 0.028965517241379312, + "grad_norm": 6.219021320343018, + "learning_rate": 9.655172413793103e-07, + "loss": 1.2173, + "step": 14 + }, + { + "epoch": 0.03103448275862069, + "grad_norm": 5.889226913452148, + "learning_rate": 1.0344827586206898e-06, + "loss": 1.2164, + "step": 15 + }, + { + "epoch": 0.03310344827586207, + "grad_norm": 5.6153411865234375, + "learning_rate": 1.103448275862069e-06, + "loss": 1.2284, + "step": 16 + }, + { + "epoch": 0.03517241379310345, + "grad_norm": 3.499351978302002, + "learning_rate": 1.1724137931034483e-06, + "loss": 1.1728, + "step": 17 + }, + { + "epoch": 0.037241379310344824, + "grad_norm": 3.140091896057129, + "learning_rate": 1.2413793103448277e-06, + "loss": 1.1509, + "step": 18 + }, + { + "epoch": 0.039310344827586205, + "grad_norm": 3.1030735969543457, + "learning_rate": 1.3103448275862072e-06, + "loss": 1.1557, + "step": 19 + }, + { + "epoch": 0.041379310344827586, + "grad_norm": 2.808349370956421, + "learning_rate": 1.3793103448275862e-06, + "loss": 1.099, + "step": 20 + }, + { + "epoch": 0.043448275862068966, + "grad_norm": 2.8193469047546387, + "learning_rate": 1.4482758620689657e-06, + "loss": 1.1728, + "step": 21 + }, + { + "epoch": 0.04551724137931035, + 
"grad_norm": 2.7212672233581543, + "learning_rate": 1.517241379310345e-06, + "loss": 1.1511, + "step": 22 + }, + { + "epoch": 0.04758620689655172, + "grad_norm": 2.5926826000213623, + "learning_rate": 1.5862068965517244e-06, + "loss": 1.1317, + "step": 23 + }, + { + "epoch": 0.0496551724137931, + "grad_norm": 3.0901269912719727, + "learning_rate": 1.6551724137931037e-06, + "loss": 1.0843, + "step": 24 + }, + { + "epoch": 0.05172413793103448, + "grad_norm": 3.495481252670288, + "learning_rate": 1.724137931034483e-06, + "loss": 1.1062, + "step": 25 + }, + { + "epoch": 0.05379310344827586, + "grad_norm": 3.160388231277466, + "learning_rate": 1.7931034482758622e-06, + "loss": 1.0713, + "step": 26 + }, + { + "epoch": 0.05586206896551724, + "grad_norm": 3.0356714725494385, + "learning_rate": 1.8620689655172416e-06, + "loss": 1.0513, + "step": 27 + }, + { + "epoch": 0.057931034482758624, + "grad_norm": 2.9434399604797363, + "learning_rate": 1.9310344827586207e-06, + "loss": 1.0907, + "step": 28 + }, + { + "epoch": 0.06, + "grad_norm": 2.5999321937561035, + "learning_rate": 2.0000000000000003e-06, + "loss": 1.0615, + "step": 29 + }, + { + "epoch": 0.06206896551724138, + "grad_norm": 2.188593864440918, + "learning_rate": 2.0689655172413796e-06, + "loss": 1.0549, + "step": 30 + }, + { + "epoch": 0.06413793103448276, + "grad_norm": 1.513140320777893, + "learning_rate": 2.137931034482759e-06, + "loss": 1.0179, + "step": 31 + }, + { + "epoch": 0.06620689655172414, + "grad_norm": 1.3605451583862305, + "learning_rate": 2.206896551724138e-06, + "loss": 1.0188, + "step": 32 + }, + { + "epoch": 0.06827586206896552, + "grad_norm": 1.7352983951568604, + "learning_rate": 2.2758620689655173e-06, + "loss": 1.0239, + "step": 33 + }, + { + "epoch": 0.0703448275862069, + "grad_norm": 1.7656395435333252, + "learning_rate": 2.3448275862068966e-06, + "loss": 0.9941, + "step": 34 + }, + { + "epoch": 0.07241379310344828, + "grad_norm": 1.684338092803955, + "learning_rate": 2.4137931034482762e-06, + "loss": 0.9742, + "step": 35 + }, + { + "epoch": 0.07448275862068965, + "grad_norm": 1.4101715087890625, + "learning_rate": 2.4827586206896555e-06, + "loss": 0.9809, + "step": 36 + }, + { + "epoch": 0.07655172413793103, + "grad_norm": 1.2714829444885254, + "learning_rate": 2.5517241379310347e-06, + "loss": 0.9693, + "step": 37 + }, + { + "epoch": 0.07862068965517241, + "grad_norm": 1.1786231994628906, + "learning_rate": 2.6206896551724144e-06, + "loss": 0.9956, + "step": 38 + }, + { + "epoch": 0.08068965517241379, + "grad_norm": 1.0718473196029663, + "learning_rate": 2.6896551724137932e-06, + "loss": 0.9836, + "step": 39 + }, + { + "epoch": 0.08275862068965517, + "grad_norm": 0.9965543746948242, + "learning_rate": 2.7586206896551725e-06, + "loss": 0.9186, + "step": 40 + }, + { + "epoch": 0.08482758620689655, + "grad_norm": 0.9652248620986938, + "learning_rate": 2.827586206896552e-06, + "loss": 0.9265, + "step": 41 + }, + { + "epoch": 0.08689655172413793, + "grad_norm": 1.0196917057037354, + "learning_rate": 2.8965517241379314e-06, + "loss": 0.9483, + "step": 42 + }, + { + "epoch": 0.08896551724137931, + "grad_norm": 1.2638320922851562, + "learning_rate": 2.9655172413793102e-06, + "loss": 0.9579, + "step": 43 + }, + { + "epoch": 0.0910344827586207, + "grad_norm": 1.0733128786087036, + "learning_rate": 3.03448275862069e-06, + "loss": 0.9739, + "step": 44 + }, + { + "epoch": 0.09310344827586207, + "grad_norm": 0.9054429531097412, + "learning_rate": 3.103448275862069e-06, + "loss": 0.9671, + "step": 45 + }, + { + "epoch": 
0.09517241379310344, + "grad_norm": 0.9127644896507263, + "learning_rate": 3.172413793103449e-06, + "loss": 0.9145, + "step": 46 + }, + { + "epoch": 0.09724137931034482, + "grad_norm": 0.8835800886154175, + "learning_rate": 3.2413793103448277e-06, + "loss": 0.9169, + "step": 47 + }, + { + "epoch": 0.0993103448275862, + "grad_norm": 1.0521382093429565, + "learning_rate": 3.3103448275862073e-06, + "loss": 0.9341, + "step": 48 + }, + { + "epoch": 0.10137931034482758, + "grad_norm": 0.8984640836715698, + "learning_rate": 3.3793103448275866e-06, + "loss": 0.9007, + "step": 49 + }, + { + "epoch": 0.10344827586206896, + "grad_norm": 0.8801516890525818, + "learning_rate": 3.448275862068966e-06, + "loss": 0.9447, + "step": 50 + }, + { + "epoch": 0.10551724137931034, + "grad_norm": 0.873184859752655, + "learning_rate": 3.517241379310345e-06, + "loss": 0.9134, + "step": 51 + }, + { + "epoch": 0.10758620689655173, + "grad_norm": 0.8689625263214111, + "learning_rate": 3.5862068965517243e-06, + "loss": 0.9404, + "step": 52 + }, + { + "epoch": 0.1096551724137931, + "grad_norm": 0.8642359972000122, + "learning_rate": 3.655172413793104e-06, + "loss": 0.8982, + "step": 53 + }, + { + "epoch": 0.11172413793103449, + "grad_norm": 0.9032179117202759, + "learning_rate": 3.7241379310344832e-06, + "loss": 0.8848, + "step": 54 + }, + { + "epoch": 0.11379310344827587, + "grad_norm": 0.8974177837371826, + "learning_rate": 3.793103448275862e-06, + "loss": 0.9485, + "step": 55 + }, + { + "epoch": 0.11586206896551725, + "grad_norm": 0.9621221423149109, + "learning_rate": 3.862068965517241e-06, + "loss": 0.8727, + "step": 56 + }, + { + "epoch": 0.11793103448275861, + "grad_norm": 0.9366543889045715, + "learning_rate": 3.931034482758621e-06, + "loss": 0.926, + "step": 57 + }, + { + "epoch": 0.12, + "grad_norm": 0.8639587163925171, + "learning_rate": 4.000000000000001e-06, + "loss": 0.8623, + "step": 58 + }, + { + "epoch": 0.12206896551724138, + "grad_norm": 0.9308229684829712, + "learning_rate": 4.0689655172413795e-06, + "loss": 0.8973, + "step": 59 + }, + { + "epoch": 0.12413793103448276, + "grad_norm": 0.8033000230789185, + "learning_rate": 4.137931034482759e-06, + "loss": 0.8861, + "step": 60 + }, + { + "epoch": 0.12620689655172415, + "grad_norm": 0.9005990028381348, + "learning_rate": 4.206896551724138e-06, + "loss": 0.8823, + "step": 61 + }, + { + "epoch": 0.12827586206896552, + "grad_norm": 0.9020888805389404, + "learning_rate": 4.275862068965518e-06, + "loss": 0.8914, + "step": 62 + }, + { + "epoch": 0.13034482758620689, + "grad_norm": 0.8798072338104248, + "learning_rate": 4.3448275862068965e-06, + "loss": 0.9076, + "step": 63 + }, + { + "epoch": 0.13241379310344828, + "grad_norm": 0.8254591226577759, + "learning_rate": 4.413793103448276e-06, + "loss": 0.9246, + "step": 64 + }, + { + "epoch": 0.13448275862068965, + "grad_norm": 0.7802205085754395, + "learning_rate": 4.482758620689656e-06, + "loss": 0.8635, + "step": 65 + }, + { + "epoch": 0.13655172413793104, + "grad_norm": 0.8493502736091614, + "learning_rate": 4.551724137931035e-06, + "loss": 0.8793, + "step": 66 + }, + { + "epoch": 0.1386206896551724, + "grad_norm": 0.8410840034484863, + "learning_rate": 4.620689655172414e-06, + "loss": 0.8593, + "step": 67 + }, + { + "epoch": 0.1406896551724138, + "grad_norm": 0.796180009841919, + "learning_rate": 4.689655172413793e-06, + "loss": 0.8797, + "step": 68 + }, + { + "epoch": 0.14275862068965517, + "grad_norm": 0.7212627530097961, + "learning_rate": 4.758620689655173e-06, + "loss": 0.8661, + "step": 69 + }, + { + 
"epoch": 0.14482758620689656, + "grad_norm": 0.8363891243934631, + "learning_rate": 4.8275862068965525e-06, + "loss": 0.8679, + "step": 70 + }, + { + "epoch": 0.14689655172413793, + "grad_norm": 0.921846866607666, + "learning_rate": 4.896551724137931e-06, + "loss": 0.8643, + "step": 71 + }, + { + "epoch": 0.1489655172413793, + "grad_norm": 0.7880343794822693, + "learning_rate": 4.965517241379311e-06, + "loss": 0.878, + "step": 72 + }, + { + "epoch": 0.1510344827586207, + "grad_norm": 0.8128662705421448, + "learning_rate": 5.03448275862069e-06, + "loss": 0.8772, + "step": 73 + }, + { + "epoch": 0.15310344827586206, + "grad_norm": 0.8667871952056885, + "learning_rate": 5.1034482758620695e-06, + "loss": 0.8731, + "step": 74 + }, + { + "epoch": 0.15517241379310345, + "grad_norm": 0.8730148673057556, + "learning_rate": 5.172413793103449e-06, + "loss": 0.876, + "step": 75 + }, + { + "epoch": 0.15724137931034482, + "grad_norm": 0.8205820918083191, + "learning_rate": 5.241379310344829e-06, + "loss": 0.8415, + "step": 76 + }, + { + "epoch": 0.15931034482758621, + "grad_norm": 0.8799615502357483, + "learning_rate": 5.310344827586207e-06, + "loss": 0.8482, + "step": 77 + }, + { + "epoch": 0.16137931034482758, + "grad_norm": 0.936702311038971, + "learning_rate": 5.3793103448275865e-06, + "loss": 0.8677, + "step": 78 + }, + { + "epoch": 0.16344827586206898, + "grad_norm": 0.8674628734588623, + "learning_rate": 5.448275862068966e-06, + "loss": 0.8973, + "step": 79 + }, + { + "epoch": 0.16551724137931034, + "grad_norm": 0.8501123785972595, + "learning_rate": 5.517241379310345e-06, + "loss": 0.8603, + "step": 80 + }, + { + "epoch": 0.16758620689655174, + "grad_norm": 0.9434123039245605, + "learning_rate": 5.586206896551725e-06, + "loss": 0.8983, + "step": 81 + }, + { + "epoch": 0.1696551724137931, + "grad_norm": 0.8477321267127991, + "learning_rate": 5.655172413793104e-06, + "loss": 0.8621, + "step": 82 + }, + { + "epoch": 0.17172413793103447, + "grad_norm": 0.8743482232093811, + "learning_rate": 5.724137931034483e-06, + "loss": 0.8854, + "step": 83 + }, + { + "epoch": 0.17379310344827587, + "grad_norm": 0.7993690967559814, + "learning_rate": 5.793103448275863e-06, + "loss": 0.8605, + "step": 84 + }, + { + "epoch": 0.17586206896551723, + "grad_norm": 0.8073793649673462, + "learning_rate": 5.862068965517242e-06, + "loss": 0.8867, + "step": 85 + }, + { + "epoch": 0.17793103448275863, + "grad_norm": 0.8699036240577698, + "learning_rate": 5.9310344827586205e-06, + "loss": 0.8752, + "step": 86 + }, + { + "epoch": 0.18, + "grad_norm": 0.9213085770606995, + "learning_rate": 6e-06, + "loss": 0.8601, + "step": 87 + }, + { + "epoch": 0.1820689655172414, + "grad_norm": 0.8272461295127869, + "learning_rate": 6.06896551724138e-06, + "loss": 0.8613, + "step": 88 + }, + { + "epoch": 0.18413793103448275, + "grad_norm": 0.8762076497077942, + "learning_rate": 6.1379310344827595e-06, + "loss": 0.8755, + "step": 89 + }, + { + "epoch": 0.18620689655172415, + "grad_norm": 0.8086516261100769, + "learning_rate": 6.206896551724138e-06, + "loss": 0.8779, + "step": 90 + }, + { + "epoch": 0.18827586206896552, + "grad_norm": 0.8407052755355835, + "learning_rate": 6.275862068965518e-06, + "loss": 0.869, + "step": 91 + }, + { + "epoch": 0.19034482758620688, + "grad_norm": 0.8412452340126038, + "learning_rate": 6.344827586206898e-06, + "loss": 0.8937, + "step": 92 + }, + { + "epoch": 0.19241379310344828, + "grad_norm": 0.8644373416900635, + "learning_rate": 6.413793103448276e-06, + "loss": 0.8627, + "step": 93 + }, + { + "epoch": 
0.19448275862068964, + "grad_norm": 0.867426335811615, + "learning_rate": 6.482758620689655e-06, + "loss": 0.8714, + "step": 94 + }, + { + "epoch": 0.19655172413793104, + "grad_norm": 0.8877378702163696, + "learning_rate": 6.551724137931035e-06, + "loss": 0.8603, + "step": 95 + }, + { + "epoch": 0.1986206896551724, + "grad_norm": 0.7677062749862671, + "learning_rate": 6.620689655172415e-06, + "loss": 0.8273, + "step": 96 + }, + { + "epoch": 0.2006896551724138, + "grad_norm": 0.8652222752571106, + "learning_rate": 6.6896551724137935e-06, + "loss": 0.8786, + "step": 97 + }, + { + "epoch": 0.20275862068965517, + "grad_norm": 0.8587886691093445, + "learning_rate": 6.758620689655173e-06, + "loss": 0.8541, + "step": 98 + }, + { + "epoch": 0.20482758620689656, + "grad_norm": 0.8325220942497253, + "learning_rate": 6.827586206896553e-06, + "loss": 0.8389, + "step": 99 + }, + { + "epoch": 0.20689655172413793, + "grad_norm": 0.9126191735267639, + "learning_rate": 6.896551724137932e-06, + "loss": 0.8588, + "step": 100 + }, + { + "epoch": 0.20896551724137932, + "grad_norm": 0.8659350872039795, + "learning_rate": 6.9655172413793105e-06, + "loss": 0.8649, + "step": 101 + }, + { + "epoch": 0.2110344827586207, + "grad_norm": 0.80833899974823, + "learning_rate": 7.03448275862069e-06, + "loss": 0.8668, + "step": 102 + }, + { + "epoch": 0.21310344827586206, + "grad_norm": 0.8443086743354797, + "learning_rate": 7.103448275862069e-06, + "loss": 0.852, + "step": 103 + }, + { + "epoch": 0.21517241379310345, + "grad_norm": 0.9743650555610657, + "learning_rate": 7.172413793103449e-06, + "loss": 0.8727, + "step": 104 + }, + { + "epoch": 0.21724137931034482, + "grad_norm": 0.8710004687309265, + "learning_rate": 7.241379310344828e-06, + "loss": 0.8605, + "step": 105 + }, + { + "epoch": 0.2193103448275862, + "grad_norm": 0.9044391512870789, + "learning_rate": 7.310344827586208e-06, + "loss": 0.8395, + "step": 106 + }, + { + "epoch": 0.22137931034482758, + "grad_norm": 0.7920845746994019, + "learning_rate": 7.379310344827587e-06, + "loss": 0.8361, + "step": 107 + }, + { + "epoch": 0.22344827586206897, + "grad_norm": 0.9043503403663635, + "learning_rate": 7.4482758620689665e-06, + "loss": 0.8326, + "step": 108 + }, + { + "epoch": 0.22551724137931034, + "grad_norm": 0.914540708065033, + "learning_rate": 7.517241379310345e-06, + "loss": 0.8592, + "step": 109 + }, + { + "epoch": 0.22758620689655173, + "grad_norm": 0.889606773853302, + "learning_rate": 7.586206896551724e-06, + "loss": 0.871, + "step": 110 + }, + { + "epoch": 0.2296551724137931, + "grad_norm": 0.8037165403366089, + "learning_rate": 7.655172413793104e-06, + "loss": 0.8059, + "step": 111 + }, + { + "epoch": 0.2317241379310345, + "grad_norm": 0.9440395832061768, + "learning_rate": 7.724137931034483e-06, + "loss": 0.8511, + "step": 112 + }, + { + "epoch": 0.23379310344827586, + "grad_norm": 0.8397315740585327, + "learning_rate": 7.793103448275863e-06, + "loss": 0.8363, + "step": 113 + }, + { + "epoch": 0.23586206896551723, + "grad_norm": 0.9024421572685242, + "learning_rate": 7.862068965517242e-06, + "loss": 0.8276, + "step": 114 + }, + { + "epoch": 0.23793103448275862, + "grad_norm": 0.9368509650230408, + "learning_rate": 7.93103448275862e-06, + "loss": 0.8896, + "step": 115 + }, + { + "epoch": 0.24, + "grad_norm": 0.9322966933250427, + "learning_rate": 8.000000000000001e-06, + "loss": 0.8054, + "step": 116 + }, + { + "epoch": 0.24206896551724139, + "grad_norm": 0.9048160910606384, + "learning_rate": 8.06896551724138e-06, + "loss": 0.8448, + "step": 117 + }, + { 
+ "epoch": 0.24413793103448275, + "grad_norm": 0.8558708429336548, + "learning_rate": 8.137931034482759e-06, + "loss": 0.8275, + "step": 118 + }, + { + "epoch": 0.24620689655172415, + "grad_norm": 0.8424068093299866, + "learning_rate": 8.206896551724138e-06, + "loss": 0.8694, + "step": 119 + }, + { + "epoch": 0.2482758620689655, + "grad_norm": 1.001757025718689, + "learning_rate": 8.275862068965518e-06, + "loss": 0.8453, + "step": 120 + }, + { + "epoch": 0.2503448275862069, + "grad_norm": 0.9482073783874512, + "learning_rate": 8.344827586206897e-06, + "loss": 0.8696, + "step": 121 + }, + { + "epoch": 0.2524137931034483, + "grad_norm": 0.9771032929420471, + "learning_rate": 8.413793103448276e-06, + "loss": 0.8373, + "step": 122 + }, + { + "epoch": 0.25448275862068964, + "grad_norm": 1.0352799892425537, + "learning_rate": 8.482758620689656e-06, + "loss": 0.837, + "step": 123 + }, + { + "epoch": 0.25655172413793104, + "grad_norm": 0.9529556035995483, + "learning_rate": 8.551724137931035e-06, + "loss": 0.8286, + "step": 124 + }, + { + "epoch": 0.25862068965517243, + "grad_norm": 0.8518896698951721, + "learning_rate": 8.620689655172414e-06, + "loss": 0.847, + "step": 125 + }, + { + "epoch": 0.26068965517241377, + "grad_norm": 0.8487024903297424, + "learning_rate": 8.689655172413793e-06, + "loss": 0.8257, + "step": 126 + }, + { + "epoch": 0.26275862068965516, + "grad_norm": 1.0520756244659424, + "learning_rate": 8.758620689655173e-06, + "loss": 0.823, + "step": 127 + }, + { + "epoch": 0.26482758620689656, + "grad_norm": 1.002064824104309, + "learning_rate": 8.827586206896552e-06, + "loss": 0.8618, + "step": 128 + }, + { + "epoch": 0.26689655172413795, + "grad_norm": 0.9487592577934265, + "learning_rate": 8.896551724137931e-06, + "loss": 0.8344, + "step": 129 + }, + { + "epoch": 0.2689655172413793, + "grad_norm": 0.9966208338737488, + "learning_rate": 8.965517241379312e-06, + "loss": 0.8372, + "step": 130 + }, + { + "epoch": 0.2710344827586207, + "grad_norm": 0.8904616832733154, + "learning_rate": 9.03448275862069e-06, + "loss": 0.8547, + "step": 131 + }, + { + "epoch": 0.2731034482758621, + "grad_norm": 0.8750537037849426, + "learning_rate": 9.10344827586207e-06, + "loss": 0.8423, + "step": 132 + }, + { + "epoch": 0.2751724137931034, + "grad_norm": 1.0919158458709717, + "learning_rate": 9.172413793103448e-06, + "loss": 0.8295, + "step": 133 + }, + { + "epoch": 0.2772413793103448, + "grad_norm": 0.8703680038452148, + "learning_rate": 9.241379310344829e-06, + "loss": 0.7951, + "step": 134 + }, + { + "epoch": 0.2793103448275862, + "grad_norm": 0.8651219606399536, + "learning_rate": 9.310344827586207e-06, + "loss": 0.8656, + "step": 135 + }, + { + "epoch": 0.2813793103448276, + "grad_norm": 1.1044590473175049, + "learning_rate": 9.379310344827586e-06, + "loss": 0.8359, + "step": 136 + }, + { + "epoch": 0.28344827586206894, + "grad_norm": 0.8297178149223328, + "learning_rate": 9.448275862068967e-06, + "loss": 0.8055, + "step": 137 + }, + { + "epoch": 0.28551724137931034, + "grad_norm": 0.8873213529586792, + "learning_rate": 9.517241379310346e-06, + "loss": 0.8346, + "step": 138 + }, + { + "epoch": 0.28758620689655173, + "grad_norm": 0.9231070280075073, + "learning_rate": 9.586206896551724e-06, + "loss": 0.8647, + "step": 139 + }, + { + "epoch": 0.2896551724137931, + "grad_norm": 0.9084100127220154, + "learning_rate": 9.655172413793105e-06, + "loss": 0.8342, + "step": 140 + }, + { + "epoch": 0.29172413793103447, + "grad_norm": 0.9144700765609741, + "learning_rate": 9.724137931034484e-06, + "loss": 
0.8468, + "step": 141 + }, + { + "epoch": 0.29379310344827586, + "grad_norm": 0.9257712364196777, + "learning_rate": 9.793103448275863e-06, + "loss": 0.8192, + "step": 142 + }, + { + "epoch": 0.29586206896551726, + "grad_norm": 0.8467687368392944, + "learning_rate": 9.862068965517241e-06, + "loss": 0.827, + "step": 143 + }, + { + "epoch": 0.2979310344827586, + "grad_norm": 0.925663411617279, + "learning_rate": 9.931034482758622e-06, + "loss": 0.8436, + "step": 144 + }, + { + "epoch": 0.3, + "grad_norm": 0.9307589530944824, + "learning_rate": 1e-05, + "loss": 0.7953, + "step": 145 + }, + { + "epoch": 0.3020689655172414, + "grad_norm": 0.9580163955688477, + "learning_rate": 9.999985489433723e-06, + "loss": 0.829, + "step": 146 + }, + { + "epoch": 0.3041379310344828, + "grad_norm": 0.8726505637168884, + "learning_rate": 9.999941957819112e-06, + "loss": 0.8571, + "step": 147 + }, + { + "epoch": 0.3062068965517241, + "grad_norm": 0.934336245059967, + "learning_rate": 9.999869405408833e-06, + "loss": 0.8127, + "step": 148 + }, + { + "epoch": 0.3082758620689655, + "grad_norm": 1.0289698839187622, + "learning_rate": 9.999767832624e-06, + "loss": 0.8592, + "step": 149 + }, + { + "epoch": 0.3103448275862069, + "grad_norm": 0.8795868754386902, + "learning_rate": 9.999637240054163e-06, + "loss": 0.8207, + "step": 150 + }, + { + "epoch": 0.3124137931034483, + "grad_norm": 0.8612985610961914, + "learning_rate": 9.99947762845731e-06, + "loss": 0.8548, + "step": 151 + }, + { + "epoch": 0.31448275862068964, + "grad_norm": 0.90495365858078, + "learning_rate": 9.999288998759865e-06, + "loss": 0.8346, + "step": 152 + }, + { + "epoch": 0.31655172413793103, + "grad_norm": 1.010977029800415, + "learning_rate": 9.999071352056676e-06, + "loss": 0.8504, + "step": 153 + }, + { + "epoch": 0.31862068965517243, + "grad_norm": 0.9661422967910767, + "learning_rate": 9.998824689611012e-06, + "loss": 0.8264, + "step": 154 + }, + { + "epoch": 0.32068965517241377, + "grad_norm": 1.0099589824676514, + "learning_rate": 9.998549012854562e-06, + "loss": 0.8054, + "step": 155 + }, + { + "epoch": 0.32275862068965516, + "grad_norm": 0.9441735744476318, + "learning_rate": 9.998244323387412e-06, + "loss": 0.803, + "step": 156 + }, + { + "epoch": 0.32482758620689656, + "grad_norm": 0.9308339953422546, + "learning_rate": 9.99791062297805e-06, + "loss": 0.8267, + "step": 157 + }, + { + "epoch": 0.32689655172413795, + "grad_norm": 0.9326705932617188, + "learning_rate": 9.997547913563352e-06, + "loss": 0.8291, + "step": 158 + }, + { + "epoch": 0.3289655172413793, + "grad_norm": 0.8982317447662354, + "learning_rate": 9.997156197248564e-06, + "loss": 0.837, + "step": 159 + }, + { + "epoch": 0.3310344827586207, + "grad_norm": 0.93187016248703, + "learning_rate": 9.996735476307292e-06, + "loss": 0.812, + "step": 160 + }, + { + "epoch": 0.3331034482758621, + "grad_norm": 0.9523342251777649, + "learning_rate": 9.996285753181499e-06, + "loss": 0.8545, + "step": 161 + }, + { + "epoch": 0.3351724137931035, + "grad_norm": 0.8485278487205505, + "learning_rate": 9.995807030481481e-06, + "loss": 0.8405, + "step": 162 + }, + { + "epoch": 0.3372413793103448, + "grad_norm": 0.9064662456512451, + "learning_rate": 9.99529931098585e-06, + "loss": 0.8164, + "step": 163 + }, + { + "epoch": 0.3393103448275862, + "grad_norm": 0.8660606145858765, + "learning_rate": 9.99476259764153e-06, + "loss": 0.816, + "step": 164 + }, + { + "epoch": 0.3413793103448276, + "grad_norm": 0.9558955430984497, + "learning_rate": 9.994196893563722e-06, + "loss": 0.8365, + "step": 
165 + }, + { + "epoch": 0.34344827586206894, + "grad_norm": 0.838666558265686, + "learning_rate": 9.993602202035902e-06, + "loss": 0.7915, + "step": 166 + }, + { + "epoch": 0.34551724137931034, + "grad_norm": 0.8363468050956726, + "learning_rate": 9.992978526509794e-06, + "loss": 0.7789, + "step": 167 + }, + { + "epoch": 0.34758620689655173, + "grad_norm": 0.8468877077102661, + "learning_rate": 9.992325870605353e-06, + "loss": 0.8306, + "step": 168 + }, + { + "epoch": 0.3496551724137931, + "grad_norm": 0.8892214298248291, + "learning_rate": 9.991644238110741e-06, + "loss": 0.8433, + "step": 169 + }, + { + "epoch": 0.35172413793103446, + "grad_norm": 0.9307257533073425, + "learning_rate": 9.990933632982307e-06, + "loss": 0.8402, + "step": 170 + }, + { + "epoch": 0.35379310344827586, + "grad_norm": 0.8565515279769897, + "learning_rate": 9.990194059344565e-06, + "loss": 0.8403, + "step": 171 + }, + { + "epoch": 0.35586206896551725, + "grad_norm": 0.89169842004776, + "learning_rate": 9.989425521490169e-06, + "loss": 0.8387, + "step": 172 + }, + { + "epoch": 0.35793103448275865, + "grad_norm": 0.9352335333824158, + "learning_rate": 9.988628023879883e-06, + "loss": 0.8292, + "step": 173 + }, + { + "epoch": 0.36, + "grad_norm": 0.8871222734451294, + "learning_rate": 9.987801571142569e-06, + "loss": 0.8245, + "step": 174 + }, + { + "epoch": 0.3620689655172414, + "grad_norm": 0.7852333188056946, + "learning_rate": 9.98694616807514e-06, + "loss": 0.8523, + "step": 175 + }, + { + "epoch": 0.3641379310344828, + "grad_norm": 0.763839840888977, + "learning_rate": 9.986061819642554e-06, + "loss": 0.8027, + "step": 176 + }, + { + "epoch": 0.3662068965517241, + "grad_norm": 0.9269424080848694, + "learning_rate": 9.985148530977767e-06, + "loss": 0.8507, + "step": 177 + }, + { + "epoch": 0.3682758620689655, + "grad_norm": 0.9032247066497803, + "learning_rate": 9.984206307381714e-06, + "loss": 0.82, + "step": 178 + }, + { + "epoch": 0.3703448275862069, + "grad_norm": 0.8867712020874023, + "learning_rate": 9.983235154323276e-06, + "loss": 0.8395, + "step": 179 + }, + { + "epoch": 0.3724137931034483, + "grad_norm": 0.8791627883911133, + "learning_rate": 9.982235077439242e-06, + "loss": 0.8677, + "step": 180 + }, + { + "epoch": 0.37448275862068964, + "grad_norm": 0.9103829264640808, + "learning_rate": 9.981206082534287e-06, + "loss": 0.8136, + "step": 181 + }, + { + "epoch": 0.37655172413793103, + "grad_norm": 0.8131744265556335, + "learning_rate": 9.98014817558093e-06, + "loss": 0.835, + "step": 182 + }, + { + "epoch": 0.3786206896551724, + "grad_norm": 0.8920416235923767, + "learning_rate": 9.979061362719502e-06, + "loss": 0.7958, + "step": 183 + }, + { + "epoch": 0.38068965517241377, + "grad_norm": 0.8333418369293213, + "learning_rate": 9.977945650258111e-06, + "loss": 0.8238, + "step": 184 + }, + { + "epoch": 0.38275862068965516, + "grad_norm": 0.8242124915122986, + "learning_rate": 9.976801044672608e-06, + "loss": 0.8412, + "step": 185 + }, + { + "epoch": 0.38482758620689655, + "grad_norm": 0.9200892448425293, + "learning_rate": 9.975627552606538e-06, + "loss": 0.8429, + "step": 186 + }, + { + "epoch": 0.38689655172413795, + "grad_norm": 0.8656523823738098, + "learning_rate": 9.974425180871118e-06, + "loss": 0.8268, + "step": 187 + }, + { + "epoch": 0.3889655172413793, + "grad_norm": 0.9208945631980896, + "learning_rate": 9.973193936445185e-06, + "loss": 0.8338, + "step": 188 + }, + { + "epoch": 0.3910344827586207, + "grad_norm": 0.873414158821106, + "learning_rate": 9.971933826475162e-06, + "loss": 0.8236, 
+ "step": 189 + }, + { + "epoch": 0.3931034482758621, + "grad_norm": 0.9169439673423767, + "learning_rate": 9.970644858275008e-06, + "loss": 0.8242, + "step": 190 + }, + { + "epoch": 0.39517241379310347, + "grad_norm": 0.8280132412910461, + "learning_rate": 9.969327039326193e-06, + "loss": 0.8269, + "step": 191 + }, + { + "epoch": 0.3972413793103448, + "grad_norm": 0.8578395247459412, + "learning_rate": 9.967980377277632e-06, + "loss": 0.7664, + "step": 192 + }, + { + "epoch": 0.3993103448275862, + "grad_norm": 0.8905231952667236, + "learning_rate": 9.966604879945659e-06, + "loss": 0.8239, + "step": 193 + }, + { + "epoch": 0.4013793103448276, + "grad_norm": 0.8388824462890625, + "learning_rate": 9.96520055531397e-06, + "loss": 0.8128, + "step": 194 + }, + { + "epoch": 0.40344827586206894, + "grad_norm": 0.7973628640174866, + "learning_rate": 9.963767411533584e-06, + "loss": 0.7949, + "step": 195 + }, + { + "epoch": 0.40551724137931033, + "grad_norm": 0.8202701807022095, + "learning_rate": 9.962305456922792e-06, + "loss": 0.8213, + "step": 196 + }, + { + "epoch": 0.40758620689655173, + "grad_norm": 0.7976661324501038, + "learning_rate": 9.960814699967112e-06, + "loss": 0.7915, + "step": 197 + }, + { + "epoch": 0.4096551724137931, + "grad_norm": 0.7930517196655273, + "learning_rate": 9.959295149319233e-06, + "loss": 0.805, + "step": 198 + }, + { + "epoch": 0.41172413793103446, + "grad_norm": 0.809988260269165, + "learning_rate": 9.95774681379897e-06, + "loss": 0.7896, + "step": 199 + }, + { + "epoch": 0.41379310344827586, + "grad_norm": 0.8766646981239319, + "learning_rate": 9.956169702393215e-06, + "loss": 0.8189, + "step": 200 + }, + { + "epoch": 0.41586206896551725, + "grad_norm": 0.9457526803016663, + "learning_rate": 9.954563824255879e-06, + "loss": 0.8189, + "step": 201 + }, + { + "epoch": 0.41793103448275865, + "grad_norm": 0.9090128540992737, + "learning_rate": 9.952929188707843e-06, + "loss": 0.8614, + "step": 202 + }, + { + "epoch": 0.42, + "grad_norm": 0.8124152421951294, + "learning_rate": 9.951265805236903e-06, + "loss": 0.8186, + "step": 203 + }, + { + "epoch": 0.4220689655172414, + "grad_norm": 0.8770976066589355, + "learning_rate": 9.94957368349771e-06, + "loss": 0.7963, + "step": 204 + }, + { + "epoch": 0.4241379310344828, + "grad_norm": 0.8936490416526794, + "learning_rate": 9.947852833311725e-06, + "loss": 0.8278, + "step": 205 + }, + { + "epoch": 0.4262068965517241, + "grad_norm": 0.9065835475921631, + "learning_rate": 9.946103264667151e-06, + "loss": 0.8026, + "step": 206 + }, + { + "epoch": 0.4282758620689655, + "grad_norm": 0.755096435546875, + "learning_rate": 9.94432498771888e-06, + "loss": 0.7987, + "step": 207 + }, + { + "epoch": 0.4303448275862069, + "grad_norm": 0.8373960256576538, + "learning_rate": 9.942518012788438e-06, + "loss": 0.7967, + "step": 208 + }, + { + "epoch": 0.4324137931034483, + "grad_norm": 0.8678641319274902, + "learning_rate": 9.940682350363913e-06, + "loss": 0.8212, + "step": 209 + }, + { + "epoch": 0.43448275862068964, + "grad_norm": 0.8203259706497192, + "learning_rate": 9.938818011099906e-06, + "loss": 0.7922, + "step": 210 + }, + { + "epoch": 0.43655172413793103, + "grad_norm": 0.8179094195365906, + "learning_rate": 9.936925005817464e-06, + "loss": 0.8271, + "step": 211 + }, + { + "epoch": 0.4386206896551724, + "grad_norm": 0.8908324241638184, + "learning_rate": 9.93500334550402e-06, + "loss": 0.7867, + "step": 212 + }, + { + "epoch": 0.4406896551724138, + "grad_norm": 0.8492280840873718, + "learning_rate": 9.933053041313325e-06, + 
"loss": 0.8225, + "step": 213 + }, + { + "epoch": 0.44275862068965516, + "grad_norm": 0.8480848670005798, + "learning_rate": 9.931074104565387e-06, + "loss": 0.8418, + "step": 214 + }, + { + "epoch": 0.44482758620689655, + "grad_norm": 0.857255756855011, + "learning_rate": 9.929066546746403e-06, + "loss": 0.8297, + "step": 215 + }, + { + "epoch": 0.44689655172413795, + "grad_norm": 0.9532951712608337, + "learning_rate": 9.927030379508691e-06, + "loss": 0.8094, + "step": 216 + }, + { + "epoch": 0.4489655172413793, + "grad_norm": 0.8437204360961914, + "learning_rate": 9.924965614670629e-06, + "loss": 0.7996, + "step": 217 + }, + { + "epoch": 0.4510344827586207, + "grad_norm": 0.7982957363128662, + "learning_rate": 9.922872264216581e-06, + "loss": 0.7962, + "step": 218 + }, + { + "epoch": 0.4531034482758621, + "grad_norm": 0.8589974045753479, + "learning_rate": 9.920750340296823e-06, + "loss": 0.8311, + "step": 219 + }, + { + "epoch": 0.45517241379310347, + "grad_norm": 0.8524751663208008, + "learning_rate": 9.918599855227487e-06, + "loss": 0.8113, + "step": 220 + }, + { + "epoch": 0.4572413793103448, + "grad_norm": 0.8594533205032349, + "learning_rate": 9.916420821490474e-06, + "loss": 0.8201, + "step": 221 + }, + { + "epoch": 0.4593103448275862, + "grad_norm": 0.872061550617218, + "learning_rate": 9.914213251733387e-06, + "loss": 0.8014, + "step": 222 + }, + { + "epoch": 0.4613793103448276, + "grad_norm": 0.8578385710716248, + "learning_rate": 9.911977158769461e-06, + "loss": 0.787, + "step": 223 + }, + { + "epoch": 0.463448275862069, + "grad_norm": 0.9662230610847473, + "learning_rate": 9.909712555577491e-06, + "loss": 0.7924, + "step": 224 + }, + { + "epoch": 0.46551724137931033, + "grad_norm": 0.909537672996521, + "learning_rate": 9.90741945530174e-06, + "loss": 0.8267, + "step": 225 + }, + { + "epoch": 0.4675862068965517, + "grad_norm": 0.8204202651977539, + "learning_rate": 9.905097871251887e-06, + "loss": 0.8142, + "step": 226 + }, + { + "epoch": 0.4696551724137931, + "grad_norm": 0.8849340081214905, + "learning_rate": 9.90274781690293e-06, + "loss": 0.8364, + "step": 227 + }, + { + "epoch": 0.47172413793103446, + "grad_norm": 0.825469970703125, + "learning_rate": 9.900369305895115e-06, + "loss": 0.7874, + "step": 228 + }, + { + "epoch": 0.47379310344827585, + "grad_norm": 0.799996018409729, + "learning_rate": 9.89796235203386e-06, + "loss": 0.8044, + "step": 229 + }, + { + "epoch": 0.47586206896551725, + "grad_norm": 0.8811237812042236, + "learning_rate": 9.895526969289671e-06, + "loss": 0.8027, + "step": 230 + }, + { + "epoch": 0.47793103448275864, + "grad_norm": 0.9074158072471619, + "learning_rate": 9.89306317179806e-06, + "loss": 0.8172, + "step": 231 + }, + { + "epoch": 0.48, + "grad_norm": 0.7685100436210632, + "learning_rate": 9.890570973859468e-06, + "loss": 0.8144, + "step": 232 + }, + { + "epoch": 0.4820689655172414, + "grad_norm": 0.8316488265991211, + "learning_rate": 9.888050389939172e-06, + "loss": 0.8188, + "step": 233 + }, + { + "epoch": 0.48413793103448277, + "grad_norm": 0.8519396185874939, + "learning_rate": 9.885501434667215e-06, + "loss": 0.8242, + "step": 234 + }, + { + "epoch": 0.4862068965517241, + "grad_norm": 0.8774333000183105, + "learning_rate": 9.882924122838311e-06, + "loss": 0.8083, + "step": 235 + }, + { + "epoch": 0.4882758620689655, + "grad_norm": 0.8599486351013184, + "learning_rate": 9.88031846941176e-06, + "loss": 0.8024, + "step": 236 + }, + { + "epoch": 0.4903448275862069, + "grad_norm": 0.8138340711593628, + "learning_rate": 
9.877684489511367e-06, + "loss": 0.8242, + "step": 237 + }, + { + "epoch": 0.4924137931034483, + "grad_norm": 0.9175668954849243, + "learning_rate": 9.875022198425347e-06, + "loss": 0.816, + "step": 238 + }, + { + "epoch": 0.49448275862068963, + "grad_norm": 0.8907325267791748, + "learning_rate": 9.872331611606238e-06, + "loss": 0.7923, + "step": 239 + }, + { + "epoch": 0.496551724137931, + "grad_norm": 0.8826651573181152, + "learning_rate": 9.869612744670817e-06, + "loss": 0.8359, + "step": 240 + }, + { + "epoch": 0.4986206896551724, + "grad_norm": 0.866460919380188, + "learning_rate": 9.866865613400008e-06, + "loss": 0.7803, + "step": 241 + }, + { + "epoch": 0.5006896551724138, + "grad_norm": 0.9463049173355103, + "learning_rate": 9.864090233738776e-06, + "loss": 0.8221, + "step": 242 + }, + { + "epoch": 0.5027586206896552, + "grad_norm": 0.8751413226127625, + "learning_rate": 9.861286621796056e-06, + "loss": 0.8299, + "step": 243 + }, + { + "epoch": 0.5048275862068966, + "grad_norm": 0.9759247303009033, + "learning_rate": 9.858454793844648e-06, + "loss": 0.8147, + "step": 244 + }, + { + "epoch": 0.506896551724138, + "grad_norm": 0.899433434009552, + "learning_rate": 9.855594766321122e-06, + "loss": 0.8172, + "step": 245 + }, + { + "epoch": 0.5089655172413793, + "grad_norm": 0.8528696298599243, + "learning_rate": 9.852706555825727e-06, + "loss": 0.7903, + "step": 246 + }, + { + "epoch": 0.5110344827586207, + "grad_norm": 0.9641590714454651, + "learning_rate": 9.849790179122287e-06, + "loss": 0.7741, + "step": 247 + }, + { + "epoch": 0.5131034482758621, + "grad_norm": 0.8884571194648743, + "learning_rate": 9.846845653138117e-06, + "loss": 0.8316, + "step": 248 + }, + { + "epoch": 0.5151724137931034, + "grad_norm": 0.8643737435340881, + "learning_rate": 9.843872994963912e-06, + "loss": 0.7956, + "step": 249 + }, + { + "epoch": 0.5172413793103449, + "grad_norm": 0.9500787854194641, + "learning_rate": 9.84087222185365e-06, + "loss": 0.8379, + "step": 250 + }, + { + "epoch": 0.5193103448275862, + "grad_norm": 0.8723439574241638, + "learning_rate": 9.837843351224504e-06, + "loss": 0.8169, + "step": 251 + }, + { + "epoch": 0.5213793103448275, + "grad_norm": 0.9550707936286926, + "learning_rate": 9.834786400656719e-06, + "loss": 0.8016, + "step": 252 + }, + { + "epoch": 0.523448275862069, + "grad_norm": 0.9410514831542969, + "learning_rate": 9.831701387893533e-06, + "loss": 0.7718, + "step": 253 + }, + { + "epoch": 0.5255172413793103, + "grad_norm": 0.8164445757865906, + "learning_rate": 9.828588330841055e-06, + "loss": 0.8035, + "step": 254 + }, + { + "epoch": 0.5275862068965518, + "grad_norm": 0.8466964364051819, + "learning_rate": 9.825447247568177e-06, + "loss": 0.8199, + "step": 255 + }, + { + "epoch": 0.5296551724137931, + "grad_norm": 1.039279818534851, + "learning_rate": 9.822278156306455e-06, + "loss": 0.8012, + "step": 256 + }, + { + "epoch": 0.5317241379310345, + "grad_norm": 0.9037715196609497, + "learning_rate": 9.819081075450014e-06, + "loss": 0.8094, + "step": 257 + }, + { + "epoch": 0.5337931034482759, + "grad_norm": 0.8636988401412964, + "learning_rate": 9.815856023555437e-06, + "loss": 0.8395, + "step": 258 + }, + { + "epoch": 0.5358620689655172, + "grad_norm": 0.883048415184021, + "learning_rate": 9.812603019341652e-06, + "loss": 0.7844, + "step": 259 + }, + { + "epoch": 0.5379310344827586, + "grad_norm": 0.861579954624176, + "learning_rate": 9.809322081689834e-06, + "loss": 0.8218, + "step": 260 + }, + { + "epoch": 0.54, + "grad_norm": 0.9384899139404297, + "learning_rate": 
9.80601322964329e-06, + "loss": 0.802, + "step": 261 + }, + { + "epoch": 0.5420689655172414, + "grad_norm": 0.8394620418548584, + "learning_rate": 9.802676482407343e-06, + "loss": 0.7974, + "step": 262 + }, + { + "epoch": 0.5441379310344827, + "grad_norm": 0.8985764384269714, + "learning_rate": 9.799311859349235e-06, + "loss": 0.8001, + "step": 263 + }, + { + "epoch": 0.5462068965517242, + "grad_norm": 0.8577210307121277, + "learning_rate": 9.795919379997993e-06, + "loss": 0.8312, + "step": 264 + }, + { + "epoch": 0.5482758620689655, + "grad_norm": 0.8467931151390076, + "learning_rate": 9.792499064044343e-06, + "loss": 0.8007, + "step": 265 + }, + { + "epoch": 0.5503448275862068, + "grad_norm": 0.9455611705780029, + "learning_rate": 9.789050931340572e-06, + "loss": 0.8143, + "step": 266 + }, + { + "epoch": 0.5524137931034483, + "grad_norm": 0.8457427620887756, + "learning_rate": 9.785575001900418e-06, + "loss": 0.7889, + "step": 267 + }, + { + "epoch": 0.5544827586206896, + "grad_norm": 0.8870904445648193, + "learning_rate": 9.78207129589897e-06, + "loss": 0.823, + "step": 268 + }, + { + "epoch": 0.5565517241379311, + "grad_norm": 0.8103102445602417, + "learning_rate": 9.778539833672525e-06, + "loss": 0.7962, + "step": 269 + }, + { + "epoch": 0.5586206896551724, + "grad_norm": 0.8109989166259766, + "learning_rate": 9.774980635718494e-06, + "loss": 0.8107, + "step": 270 + }, + { + "epoch": 0.5606896551724138, + "grad_norm": 0.8663826584815979, + "learning_rate": 9.771393722695267e-06, + "loss": 0.8158, + "step": 271 + }, + { + "epoch": 0.5627586206896552, + "grad_norm": 1.5541844367980957, + "learning_rate": 9.767779115422097e-06, + "loss": 0.7898, + "step": 272 + }, + { + "epoch": 0.5648275862068965, + "grad_norm": 0.8373553156852722, + "learning_rate": 9.764136834878987e-06, + "loss": 0.7963, + "step": 273 + }, + { + "epoch": 0.5668965517241379, + "grad_norm": 0.7791042923927307, + "learning_rate": 9.760466902206557e-06, + "loss": 0.8085, + "step": 274 + }, + { + "epoch": 0.5689655172413793, + "grad_norm": 0.8393211960792542, + "learning_rate": 9.756769338705927e-06, + "loss": 0.8136, + "step": 275 + }, + { + "epoch": 0.5710344827586207, + "grad_norm": 0.811062216758728, + "learning_rate": 9.753044165838594e-06, + "loss": 0.8075, + "step": 276 + }, + { + "epoch": 0.573103448275862, + "grad_norm": 0.8894556760787964, + "learning_rate": 9.749291405226304e-06, + "loss": 0.8371, + "step": 277 + }, + { + "epoch": 0.5751724137931035, + "grad_norm": 0.8580628037452698, + "learning_rate": 9.745511078650931e-06, + "loss": 0.7695, + "step": 278 + }, + { + "epoch": 0.5772413793103448, + "grad_norm": 0.7854070067405701, + "learning_rate": 9.741703208054347e-06, + "loss": 0.8076, + "step": 279 + }, + { + "epoch": 0.5793103448275863, + "grad_norm": 0.8000266551971436, + "learning_rate": 9.737867815538292e-06, + "loss": 0.7894, + "step": 280 + }, + { + "epoch": 0.5813793103448276, + "grad_norm": 0.8094033002853394, + "learning_rate": 9.734004923364258e-06, + "loss": 0.8035, + "step": 281 + }, + { + "epoch": 0.5834482758620689, + "grad_norm": 0.8682005405426025, + "learning_rate": 9.73011455395334e-06, + "loss": 0.8219, + "step": 282 + }, + { + "epoch": 0.5855172413793104, + "grad_norm": 0.8397322297096252, + "learning_rate": 9.72619672988613e-06, + "loss": 0.8241, + "step": 283 + }, + { + "epoch": 0.5875862068965517, + "grad_norm": 0.8110212087631226, + "learning_rate": 9.722251473902562e-06, + "loss": 0.8099, + "step": 284 + }, + { + "epoch": 0.5896551724137931, + "grad_norm": 0.8143454790115356, + 
"learning_rate": 9.718278808901797e-06, + "loss": 0.7744, + "step": 285 + }, + { + "epoch": 0.5917241379310345, + "grad_norm": 0.8721611499786377, + "learning_rate": 9.714278757942082e-06, + "loss": 0.785, + "step": 286 + }, + { + "epoch": 0.5937931034482758, + "grad_norm": 0.7908191084861755, + "learning_rate": 9.710251344240619e-06, + "loss": 0.7983, + "step": 287 + }, + { + "epoch": 0.5958620689655172, + "grad_norm": 0.8809441924095154, + "learning_rate": 9.70619659117343e-06, + "loss": 0.8428, + "step": 288 + }, + { + "epoch": 0.5979310344827586, + "grad_norm": 0.8129044771194458, + "learning_rate": 9.702114522275216e-06, + "loss": 0.8149, + "step": 289 + }, + { + "epoch": 0.6, + "grad_norm": 0.826924204826355, + "learning_rate": 9.698005161239237e-06, + "loss": 0.8091, + "step": 290 + }, + { + "epoch": 0.6020689655172414, + "grad_norm": 0.8344675302505493, + "learning_rate": 9.69386853191715e-06, + "loss": 0.7913, + "step": 291 + }, + { + "epoch": 0.6041379310344828, + "grad_norm": 0.8673633933067322, + "learning_rate": 9.689704658318888e-06, + "loss": 0.8, + "step": 292 + }, + { + "epoch": 0.6062068965517241, + "grad_norm": 0.7886486053466797, + "learning_rate": 9.685513564612521e-06, + "loss": 0.7967, + "step": 293 + }, + { + "epoch": 0.6082758620689656, + "grad_norm": 0.8335657715797424, + "learning_rate": 9.681295275124104e-06, + "loss": 0.7843, + "step": 294 + }, + { + "epoch": 0.6103448275862069, + "grad_norm": 0.934149980545044, + "learning_rate": 9.67704981433754e-06, + "loss": 0.7967, + "step": 295 + }, + { + "epoch": 0.6124137931034482, + "grad_norm": 0.8753807544708252, + "learning_rate": 9.672777206894452e-06, + "loss": 0.8086, + "step": 296 + }, + { + "epoch": 0.6144827586206897, + "grad_norm": 0.8328104615211487, + "learning_rate": 9.668477477594021e-06, + "loss": 0.8006, + "step": 297 + }, + { + "epoch": 0.616551724137931, + "grad_norm": 0.8407275080680847, + "learning_rate": 9.664150651392846e-06, + "loss": 0.7968, + "step": 298 + }, + { + "epoch": 0.6186206896551724, + "grad_norm": 0.8628553748130798, + "learning_rate": 9.659796753404806e-06, + "loss": 0.8256, + "step": 299 + }, + { + "epoch": 0.6206896551724138, + "grad_norm": 0.7844734191894531, + "learning_rate": 9.655415808900915e-06, + "loss": 0.784, + "step": 300 + }, + { + "epoch": 0.6227586206896552, + "grad_norm": 0.8278802633285522, + "learning_rate": 9.651007843309164e-06, + "loss": 0.7548, + "step": 301 + }, + { + "epoch": 0.6248275862068966, + "grad_norm": 0.8672357201576233, + "learning_rate": 9.646572882214385e-06, + "loss": 0.8006, + "step": 302 + }, + { + "epoch": 0.6268965517241379, + "grad_norm": 0.8474316000938416, + "learning_rate": 9.642110951358097e-06, + "loss": 0.8056, + "step": 303 + }, + { + "epoch": 0.6289655172413793, + "grad_norm": 0.8918145895004272, + "learning_rate": 9.637622076638358e-06, + "loss": 0.8086, + "step": 304 + }, + { + "epoch": 0.6310344827586207, + "grad_norm": 0.7701090574264526, + "learning_rate": 9.633106284109612e-06, + "loss": 0.7971, + "step": 305 + }, + { + "epoch": 0.6331034482758621, + "grad_norm": 0.7959749102592468, + "learning_rate": 9.62856359998254e-06, + "loss": 0.7914, + "step": 306 + }, + { + "epoch": 0.6351724137931034, + "grad_norm": 0.8288137912750244, + "learning_rate": 9.623994050623917e-06, + "loss": 0.8074, + "step": 307 + }, + { + "epoch": 0.6372413793103449, + "grad_norm": 0.7820798754692078, + "learning_rate": 9.619397662556434e-06, + "loss": 0.764, + "step": 308 + }, + { + "epoch": 0.6393103448275862, + "grad_norm": 0.8969067335128784, + 
"learning_rate": 9.614774462458573e-06, + "loss": 0.8464, + "step": 309 + }, + { + "epoch": 0.6413793103448275, + "grad_norm": 0.8294548988342285, + "learning_rate": 9.610124477164436e-06, + "loss": 0.8576, + "step": 310 + }, + { + "epoch": 0.643448275862069, + "grad_norm": 0.8388926982879639, + "learning_rate": 9.605447733663586e-06, + "loss": 0.7603, + "step": 311 + }, + { + "epoch": 0.6455172413793103, + "grad_norm": 0.8787766098976135, + "learning_rate": 9.600744259100905e-06, + "loss": 0.7991, + "step": 312 + }, + { + "epoch": 0.6475862068965518, + "grad_norm": 0.8679440021514893, + "learning_rate": 9.596014080776424e-06, + "loss": 0.7977, + "step": 313 + }, + { + "epoch": 0.6496551724137931, + "grad_norm": 0.8179478049278259, + "learning_rate": 9.59125722614517e-06, + "loss": 0.8009, + "step": 314 + }, + { + "epoch": 0.6517241379310345, + "grad_norm": 0.8390472531318665, + "learning_rate": 9.586473722817002e-06, + "loss": 0.8023, + "step": 315 + }, + { + "epoch": 0.6537931034482759, + "grad_norm": 0.9145618677139282, + "learning_rate": 9.581663598556461e-06, + "loss": 0.7888, + "step": 316 + }, + { + "epoch": 0.6558620689655172, + "grad_norm": 0.7960366010665894, + "learning_rate": 9.576826881282595e-06, + "loss": 0.8047, + "step": 317 + }, + { + "epoch": 0.6579310344827586, + "grad_norm": 0.8886500000953674, + "learning_rate": 9.571963599068807e-06, + "loss": 0.7772, + "step": 318 + }, + { + "epoch": 0.66, + "grad_norm": 0.8854326009750366, + "learning_rate": 9.56707378014269e-06, + "loss": 0.783, + "step": 319 + }, + { + "epoch": 0.6620689655172414, + "grad_norm": 0.7884778380393982, + "learning_rate": 9.56215745288586e-06, + "loss": 0.7783, + "step": 320 + }, + { + "epoch": 0.6641379310344827, + "grad_norm": 0.9254597425460815, + "learning_rate": 9.557214645833792e-06, + "loss": 0.7903, + "step": 321 + }, + { + "epoch": 0.6662068965517242, + "grad_norm": 1.0196994543075562, + "learning_rate": 9.55224538767566e-06, + "loss": 0.7955, + "step": 322 + }, + { + "epoch": 0.6682758620689655, + "grad_norm": 0.8419172167778015, + "learning_rate": 9.547249707254166e-06, + "loss": 0.8135, + "step": 323 + }, + { + "epoch": 0.670344827586207, + "grad_norm": 0.85199373960495, + "learning_rate": 9.542227633565364e-06, + "loss": 0.7882, + "step": 324 + }, + { + "epoch": 0.6724137931034483, + "grad_norm": 0.8458907008171082, + "learning_rate": 9.537179195758513e-06, + "loss": 0.7803, + "step": 325 + }, + { + "epoch": 0.6744827586206896, + "grad_norm": 0.8774712681770325, + "learning_rate": 9.532104423135885e-06, + "loss": 0.8011, + "step": 326 + }, + { + "epoch": 0.6765517241379311, + "grad_norm": 0.8695656657218933, + "learning_rate": 9.527003345152616e-06, + "loss": 0.792, + "step": 327 + }, + { + "epoch": 0.6786206896551724, + "grad_norm": 0.799929141998291, + "learning_rate": 9.521875991416514e-06, + "loss": 0.7651, + "step": 328 + }, + { + "epoch": 0.6806896551724138, + "grad_norm": 0.8962751626968384, + "learning_rate": 9.516722391687903e-06, + "loss": 0.7951, + "step": 329 + }, + { + "epoch": 0.6827586206896552, + "grad_norm": 0.8643319010734558, + "learning_rate": 9.51154257587944e-06, + "loss": 0.7967, + "step": 330 + }, + { + "epoch": 0.6848275862068965, + "grad_norm": 0.9759703874588013, + "learning_rate": 9.506336574055954e-06, + "loss": 0.7929, + "step": 331 + }, + { + "epoch": 0.6868965517241379, + "grad_norm": 0.8432797193527222, + "learning_rate": 9.501104416434255e-06, + "loss": 0.7779, + "step": 332 + }, + { + "epoch": 0.6889655172413793, + "grad_norm": 0.9403534531593323, + 
"learning_rate": 9.495846133382973e-06, + "loss": 0.8123, + "step": 333 + }, + { + "epoch": 0.6910344827586207, + "grad_norm": 0.8679063320159912, + "learning_rate": 9.490561755422374e-06, + "loss": 0.7462, + "step": 334 + }, + { + "epoch": 0.6931034482758621, + "grad_norm": 0.791249692440033, + "learning_rate": 9.485251313224181e-06, + "loss": 0.7982, + "step": 335 + }, + { + "epoch": 0.6951724137931035, + "grad_norm": 0.830558180809021, + "learning_rate": 9.479914837611409e-06, + "loss": 0.7568, + "step": 336 + }, + { + "epoch": 0.6972413793103448, + "grad_norm": 0.8038316369056702, + "learning_rate": 9.474552359558167e-06, + "loss": 0.7626, + "step": 337 + }, + { + "epoch": 0.6993103448275862, + "grad_norm": 0.7749502658843994, + "learning_rate": 9.469163910189494e-06, + "loss": 0.79, + "step": 338 + }, + { + "epoch": 0.7013793103448276, + "grad_norm": 0.9494272470474243, + "learning_rate": 9.463749520781172e-06, + "loss": 0.7803, + "step": 339 + }, + { + "epoch": 0.7034482758620689, + "grad_norm": 0.8260956406593323, + "learning_rate": 9.458309222759542e-06, + "loss": 0.7729, + "step": 340 + }, + { + "epoch": 0.7055172413793104, + "grad_norm": 0.8298391699790955, + "learning_rate": 9.452843047701324e-06, + "loss": 0.8141, + "step": 341 + }, + { + "epoch": 0.7075862068965517, + "grad_norm": 0.9029043912887573, + "learning_rate": 9.44735102733344e-06, + "loss": 0.797, + "step": 342 + }, + { + "epoch": 0.7096551724137931, + "grad_norm": 0.8290296196937561, + "learning_rate": 9.441833193532817e-06, + "loss": 0.7708, + "step": 343 + }, + { + "epoch": 0.7117241379310345, + "grad_norm": 0.7795094847679138, + "learning_rate": 9.436289578326215e-06, + "loss": 0.8196, + "step": 344 + }, + { + "epoch": 0.7137931034482758, + "grad_norm": 0.7970453500747681, + "learning_rate": 9.43072021389003e-06, + "loss": 0.822, + "step": 345 + }, + { + "epoch": 0.7158620689655173, + "grad_norm": 0.8111122250556946, + "learning_rate": 9.425125132550115e-06, + "loss": 0.771, + "step": 346 + }, + { + "epoch": 0.7179310344827586, + "grad_norm": 0.782451868057251, + "learning_rate": 9.419504366781591e-06, + "loss": 0.7865, + "step": 347 + }, + { + "epoch": 0.72, + "grad_norm": 0.7692107558250427, + "learning_rate": 9.413857949208655e-06, + "loss": 0.7748, + "step": 348 + }, + { + "epoch": 0.7220689655172414, + "grad_norm": 0.9149669408798218, + "learning_rate": 9.408185912604395e-06, + "loss": 0.7847, + "step": 349 + }, + { + "epoch": 0.7241379310344828, + "grad_norm": 0.812156081199646, + "learning_rate": 9.402488289890593e-06, + "loss": 0.7921, + "step": 350 + }, + { + "epoch": 0.7262068965517241, + "grad_norm": 0.8430172801017761, + "learning_rate": 9.396765114137544e-06, + "loss": 0.7752, + "step": 351 + }, + { + "epoch": 0.7282758620689656, + "grad_norm": 0.8315995931625366, + "learning_rate": 9.391016418563856e-06, + "loss": 0.7867, + "step": 352 + }, + { + "epoch": 0.7303448275862069, + "grad_norm": 0.8469542860984802, + "learning_rate": 9.385242236536259e-06, + "loss": 0.7573, + "step": 353 + }, + { + "epoch": 0.7324137931034482, + "grad_norm": 0.8568265438079834, + "learning_rate": 9.379442601569415e-06, + "loss": 0.7873, + "step": 354 + }, + { + "epoch": 0.7344827586206897, + "grad_norm": 0.8971392512321472, + "learning_rate": 9.37361754732572e-06, + "loss": 0.8196, + "step": 355 + }, + { + "epoch": 0.736551724137931, + "grad_norm": 0.8264282941818237, + "learning_rate": 9.367767107615106e-06, + "loss": 0.7785, + "step": 356 + }, + { + "epoch": 0.7386206896551725, + "grad_norm": 0.852152407169342, + 
"learning_rate": 9.36189131639485e-06, + "loss": 0.7655, + "step": 357 + }, + { + "epoch": 0.7406896551724138, + "grad_norm": 0.7565860152244568, + "learning_rate": 9.355990207769378e-06, + "loss": 0.7672, + "step": 358 + }, + { + "epoch": 0.7427586206896551, + "grad_norm": 0.8384962677955627, + "learning_rate": 9.35006381599006e-06, + "loss": 0.7768, + "step": 359 + }, + { + "epoch": 0.7448275862068966, + "grad_norm": 0.806664764881134, + "learning_rate": 9.344112175455013e-06, + "loss": 0.7716, + "step": 360 + }, + { + "epoch": 0.7468965517241379, + "grad_norm": 0.848978579044342, + "learning_rate": 9.338135320708912e-06, + "loss": 0.817, + "step": 361 + }, + { + "epoch": 0.7489655172413793, + "grad_norm": 0.8164881467819214, + "learning_rate": 9.332133286442772e-06, + "loss": 0.7967, + "step": 362 + }, + { + "epoch": 0.7510344827586207, + "grad_norm": 0.855451226234436, + "learning_rate": 9.326106107493762e-06, + "loss": 0.7519, + "step": 363 + }, + { + "epoch": 0.7531034482758621, + "grad_norm": 0.8127675652503967, + "learning_rate": 9.32005381884499e-06, + "loss": 0.758, + "step": 364 + }, + { + "epoch": 0.7551724137931034, + "grad_norm": 0.8386479020118713, + "learning_rate": 9.313976455625316e-06, + "loss": 0.7947, + "step": 365 + }, + { + "epoch": 0.7572413793103449, + "grad_norm": 0.9732878804206848, + "learning_rate": 9.307874053109127e-06, + "loss": 0.7968, + "step": 366 + }, + { + "epoch": 0.7593103448275862, + "grad_norm": 0.9192622900009155, + "learning_rate": 9.301746646716152e-06, + "loss": 0.7927, + "step": 367 + }, + { + "epoch": 0.7613793103448275, + "grad_norm": 0.8056217432022095, + "learning_rate": 9.295594272011246e-06, + "loss": 0.7892, + "step": 368 + }, + { + "epoch": 0.763448275862069, + "grad_norm": 0.8506579399108887, + "learning_rate": 9.289416964704186e-06, + "loss": 0.7746, + "step": 369 + }, + { + "epoch": 0.7655172413793103, + "grad_norm": 0.8824191689491272, + "learning_rate": 9.283214760649462e-06, + "loss": 0.7749, + "step": 370 + }, + { + "epoch": 0.7675862068965518, + "grad_norm": 0.894450306892395, + "learning_rate": 9.27698769584607e-06, + "loss": 0.7792, + "step": 371 + }, + { + "epoch": 0.7696551724137931, + "grad_norm": 0.8521785140037537, + "learning_rate": 9.270735806437306e-06, + "loss": 0.8031, + "step": 372 + }, + { + "epoch": 0.7717241379310344, + "grad_norm": 0.9518225789070129, + "learning_rate": 9.26445912871055e-06, + "loss": 0.8018, + "step": 373 + }, + { + "epoch": 0.7737931034482759, + "grad_norm": 0.82342129945755, + "learning_rate": 9.258157699097065e-06, + "loss": 0.7745, + "step": 374 + }, + { + "epoch": 0.7758620689655172, + "grad_norm": 0.9217233657836914, + "learning_rate": 9.251831554171773e-06, + "loss": 0.8179, + "step": 375 + }, + { + "epoch": 0.7779310344827586, + "grad_norm": 0.9028921127319336, + "learning_rate": 9.245480730653055e-06, + "loss": 0.7961, + "step": 376 + }, + { + "epoch": 0.78, + "grad_norm": 0.7794080376625061, + "learning_rate": 9.239105265402525e-06, + "loss": 0.7874, + "step": 377 + }, + { + "epoch": 0.7820689655172414, + "grad_norm": 0.8010353446006775, + "learning_rate": 9.232705195424832e-06, + "loss": 0.7673, + "step": 378 + }, + { + "epoch": 0.7841379310344827, + "grad_norm": 0.8443918824195862, + "learning_rate": 9.226280557867428e-06, + "loss": 0.7922, + "step": 379 + }, + { + "epoch": 0.7862068965517242, + "grad_norm": 0.8193161487579346, + "learning_rate": 9.219831390020367e-06, + "loss": 0.7668, + "step": 380 + }, + { + "epoch": 0.7882758620689655, + "grad_norm": 0.8224409818649292, + 
"learning_rate": 9.213357729316077e-06, + "loss": 0.7643, + "step": 381 + }, + { + "epoch": 0.7903448275862069, + "grad_norm": 0.8491160273551941, + "learning_rate": 9.206859613329155e-06, + "loss": 0.7801, + "step": 382 + }, + { + "epoch": 0.7924137931034483, + "grad_norm": 0.8366249203681946, + "learning_rate": 9.200337079776136e-06, + "loss": 0.7496, + "step": 383 + }, + { + "epoch": 0.7944827586206896, + "grad_norm": 0.7923499345779419, + "learning_rate": 9.193790166515282e-06, + "loss": 0.7988, + "step": 384 + }, + { + "epoch": 0.7965517241379311, + "grad_norm": 0.8000715970993042, + "learning_rate": 9.187218911546363e-06, + "loss": 0.7583, + "step": 385 + }, + { + "epoch": 0.7986206896551724, + "grad_norm": 0.7873345017433167, + "learning_rate": 9.180623353010428e-06, + "loss": 0.7631, + "step": 386 + }, + { + "epoch": 0.8006896551724138, + "grad_norm": 0.8444647789001465, + "learning_rate": 9.174003529189592e-06, + "loss": 0.7837, + "step": 387 + }, + { + "epoch": 0.8027586206896552, + "grad_norm": 0.7419801354408264, + "learning_rate": 9.167359478506816e-06, + "loss": 0.7626, + "step": 388 + }, + { + "epoch": 0.8048275862068965, + "grad_norm": 0.774566113948822, + "learning_rate": 9.160691239525675e-06, + "loss": 0.7728, + "step": 389 + }, + { + "epoch": 0.8068965517241379, + "grad_norm": 0.730536699295044, + "learning_rate": 9.153998850950133e-06, + "loss": 0.7907, + "step": 390 + }, + { + "epoch": 0.8089655172413793, + "grad_norm": 0.726324737071991, + "learning_rate": 9.147282351624334e-06, + "loss": 0.7827, + "step": 391 + }, + { + "epoch": 0.8110344827586207, + "grad_norm": 0.7642649412155151, + "learning_rate": 9.140541780532362e-06, + "loss": 0.7417, + "step": 392 + }, + { + "epoch": 0.8131034482758621, + "grad_norm": 0.8225309252738953, + "learning_rate": 9.133777176798013e-06, + "loss": 0.8105, + "step": 393 + }, + { + "epoch": 0.8151724137931035, + "grad_norm": 0.7588528394699097, + "learning_rate": 9.126988579684587e-06, + "loss": 0.781, + "step": 394 + }, + { + "epoch": 0.8172413793103448, + "grad_norm": 0.8489255309104919, + "learning_rate": 9.120176028594632e-06, + "loss": 0.7515, + "step": 395 + }, + { + "epoch": 0.8193103448275862, + "grad_norm": 0.8431716561317444, + "learning_rate": 9.113339563069742e-06, + "loss": 0.7931, + "step": 396 + }, + { + "epoch": 0.8213793103448276, + "grad_norm": 0.8133612871170044, + "learning_rate": 9.106479222790312e-06, + "loss": 0.7877, + "step": 397 + }, + { + "epoch": 0.8234482758620689, + "grad_norm": 0.8307138085365295, + "learning_rate": 9.099595047575308e-06, + "loss": 0.7774, + "step": 398 + }, + { + "epoch": 0.8255172413793104, + "grad_norm": 0.7466663122177124, + "learning_rate": 9.092687077382043e-06, + "loss": 0.7667, + "step": 399 + }, + { + "epoch": 0.8275862068965517, + "grad_norm": 0.8137718439102173, + "learning_rate": 9.08575535230594e-06, + "loss": 0.8056, + "step": 400 + }, + { + "epoch": 0.829655172413793, + "grad_norm": 0.8068400621414185, + "learning_rate": 9.078799912580305e-06, + "loss": 0.7588, + "step": 401 + }, + { + "epoch": 0.8317241379310345, + "grad_norm": 0.8182150721549988, + "learning_rate": 9.071820798576083e-06, + "loss": 0.7748, + "step": 402 + }, + { + "epoch": 0.8337931034482758, + "grad_norm": 0.8240687251091003, + "learning_rate": 9.064818050801634e-06, + "loss": 0.7854, + "step": 403 + }, + { + "epoch": 0.8358620689655173, + "grad_norm": 0.8210726976394653, + "learning_rate": 9.057791709902491e-06, + "loss": 0.7809, + "step": 404 + }, + { + "epoch": 0.8379310344827586, + "grad_norm": 
0.8129264712333679, + "learning_rate": 9.050741816661128e-06, + "loss": 0.7747, + "step": 405 + }, + { + "epoch": 0.84, + "grad_norm": 0.7455905079841614, + "learning_rate": 9.04366841199672e-06, + "loss": 0.777, + "step": 406 + }, + { + "epoch": 0.8420689655172414, + "grad_norm": 0.8041906356811523, + "learning_rate": 9.036571536964915e-06, + "loss": 0.7634, + "step": 407 + }, + { + "epoch": 0.8441379310344828, + "grad_norm": 0.7791020274162292, + "learning_rate": 9.029451232757581e-06, + "loss": 0.803, + "step": 408 + }, + { + "epoch": 0.8462068965517241, + "grad_norm": 0.8423375487327576, + "learning_rate": 9.022307540702576e-06, + "loss": 0.7263, + "step": 409 + }, + { + "epoch": 0.8482758620689655, + "grad_norm": 0.8119176030158997, + "learning_rate": 9.015140502263506e-06, + "loss": 0.7459, + "step": 410 + }, + { + "epoch": 0.8503448275862069, + "grad_norm": 0.8209352493286133, + "learning_rate": 9.007950159039488e-06, + "loss": 0.7858, + "step": 411 + }, + { + "epoch": 0.8524137931034482, + "grad_norm": 0.8128482699394226, + "learning_rate": 9.0007365527649e-06, + "loss": 0.7707, + "step": 412 + }, + { + "epoch": 0.8544827586206897, + "grad_norm": 0.8830553293228149, + "learning_rate": 8.993499725309148e-06, + "loss": 0.7579, + "step": 413 + }, + { + "epoch": 0.856551724137931, + "grad_norm": 0.8541165590286255, + "learning_rate": 8.98623971867642e-06, + "loss": 0.7638, + "step": 414 + }, + { + "epoch": 0.8586206896551725, + "grad_norm": 0.7872908115386963, + "learning_rate": 8.978956575005432e-06, + "loss": 0.7967, + "step": 415 + }, + { + "epoch": 0.8606896551724138, + "grad_norm": 0.7946335673332214, + "learning_rate": 8.971650336569208e-06, + "loss": 0.7769, + "step": 416 + }, + { + "epoch": 0.8627586206896551, + "grad_norm": 0.8130462169647217, + "learning_rate": 8.964321045774808e-06, + "loss": 0.7889, + "step": 417 + }, + { + "epoch": 0.8648275862068966, + "grad_norm": 0.7823083400726318, + "learning_rate": 8.956968745163093e-06, + "loss": 0.7814, + "step": 418 + }, + { + "epoch": 0.8668965517241379, + "grad_norm": 0.8023502230644226, + "learning_rate": 8.949593477408485e-06, + "loss": 0.7647, + "step": 419 + }, + { + "epoch": 0.8689655172413793, + "grad_norm": 0.7985740900039673, + "learning_rate": 8.942195285318705e-06, + "loss": 0.7794, + "step": 420 + }, + { + "epoch": 0.8710344827586207, + "grad_norm": 0.8288792371749878, + "learning_rate": 8.934774211834538e-06, + "loss": 0.7891, + "step": 421 + }, + { + "epoch": 0.8731034482758621, + "grad_norm": 0.8776261806488037, + "learning_rate": 8.927330300029575e-06, + "loss": 0.7723, + "step": 422 + }, + { + "epoch": 0.8751724137931034, + "grad_norm": 0.8444452285766602, + "learning_rate": 8.919863593109967e-06, + "loss": 0.7812, + "step": 423 + }, + { + "epoch": 0.8772413793103448, + "grad_norm": 0.8049442768096924, + "learning_rate": 8.912374134414171e-06, + "loss": 0.7765, + "step": 424 + }, + { + "epoch": 0.8793103448275862, + "grad_norm": 0.8133780360221863, + "learning_rate": 8.904861967412702e-06, + "loss": 0.7289, + "step": 425 + }, + { + "epoch": 0.8813793103448276, + "grad_norm": 0.7836434245109558, + "learning_rate": 8.897327135707881e-06, + "loss": 0.7887, + "step": 426 + }, + { + "epoch": 0.883448275862069, + "grad_norm": 0.8233901858329773, + "learning_rate": 8.889769683033573e-06, + "loss": 0.7623, + "step": 427 + }, + { + "epoch": 0.8855172413793103, + "grad_norm": 0.8158826231956482, + "learning_rate": 8.882189653254951e-06, + "loss": 0.7821, + "step": 428 + }, + { + "epoch": 0.8875862068965518, + "grad_norm": 
0.8711895942687988, + "learning_rate": 8.874587090368221e-06, + "loss": 0.7668, + "step": 429 + }, + { + "epoch": 0.8896551724137931, + "grad_norm": 0.7956521511077881, + "learning_rate": 8.86696203850038e-06, + "loss": 0.762, + "step": 430 + }, + { + "epoch": 0.8917241379310344, + "grad_norm": 0.7590909600257874, + "learning_rate": 8.859314541908957e-06, + "loss": 0.7269, + "step": 431 + }, + { + "epoch": 0.8937931034482759, + "grad_norm": 0.74117511510849, + "learning_rate": 8.851644644981756e-06, + "loss": 0.7703, + "step": 432 + }, + { + "epoch": 0.8958620689655172, + "grad_norm": 0.7806484699249268, + "learning_rate": 8.843952392236595e-06, + "loss": 0.7608, + "step": 433 + }, + { + "epoch": 0.8979310344827586, + "grad_norm": 0.7991644740104675, + "learning_rate": 8.83623782832105e-06, + "loss": 0.7932, + "step": 434 + }, + { + "epoch": 0.9, + "grad_norm": 0.7456468939781189, + "learning_rate": 8.8285009980122e-06, + "loss": 0.7638, + "step": 435 + }, + { + "epoch": 0.9020689655172414, + "grad_norm": 0.814285933971405, + "learning_rate": 8.820741946216357e-06, + "loss": 0.7711, + "step": 436 + }, + { + "epoch": 0.9041379310344828, + "grad_norm": 0.767855703830719, + "learning_rate": 8.81296071796882e-06, + "loss": 0.792, + "step": 437 + }, + { + "epoch": 0.9062068965517242, + "grad_norm": 0.8761329054832458, + "learning_rate": 8.805157358433595e-06, + "loss": 0.7736, + "step": 438 + }, + { + "epoch": 0.9082758620689655, + "grad_norm": 0.9907826781272888, + "learning_rate": 8.797331912903152e-06, + "loss": 0.7725, + "step": 439 + }, + { + "epoch": 0.9103448275862069, + "grad_norm": 0.8171151280403137, + "learning_rate": 8.789484426798148e-06, + "loss": 0.7593, + "step": 440 + }, + { + "epoch": 0.9124137931034483, + "grad_norm": 0.8003857731819153, + "learning_rate": 8.78161494566717e-06, + "loss": 0.7596, + "step": 441 + }, + { + "epoch": 0.9144827586206896, + "grad_norm": 0.9617862105369568, + "learning_rate": 8.77372351518647e-06, + "loss": 0.7872, + "step": 442 + }, + { + "epoch": 0.9165517241379311, + "grad_norm": 0.8046567440032959, + "learning_rate": 8.765810181159696e-06, + "loss": 0.7482, + "step": 443 + }, + { + "epoch": 0.9186206896551724, + "grad_norm": 0.7760241031646729, + "learning_rate": 8.757874989517631e-06, + "loss": 0.7843, + "step": 444 + }, + { + "epoch": 0.9206896551724137, + "grad_norm": 0.7372346520423889, + "learning_rate": 8.74991798631793e-06, + "loss": 0.7882, + "step": 445 + }, + { + "epoch": 0.9227586206896552, + "grad_norm": 0.8651936650276184, + "learning_rate": 8.741939217744835e-06, + "loss": 0.7872, + "step": 446 + }, + { + "epoch": 0.9248275862068965, + "grad_norm": 0.7748751640319824, + "learning_rate": 8.733938730108928e-06, + "loss": 0.7801, + "step": 447 + }, + { + "epoch": 0.926896551724138, + "grad_norm": 0.8208494782447815, + "learning_rate": 8.725916569846854e-06, + "loss": 0.7878, + "step": 448 + }, + { + "epoch": 0.9289655172413793, + "grad_norm": 0.7929314374923706, + "learning_rate": 8.717872783521048e-06, + "loss": 0.7709, + "step": 449 + }, + { + "epoch": 0.9310344827586207, + "grad_norm": 0.7704197764396667, + "learning_rate": 8.709807417819464e-06, + "loss": 0.7916, + "step": 450 + }, + { + "epoch": 0.9331034482758621, + "grad_norm": 0.8212770223617554, + "learning_rate": 8.701720519555315e-06, + "loss": 0.7711, + "step": 451 + }, + { + "epoch": 0.9351724137931035, + "grad_norm": 0.8029863238334656, + "learning_rate": 8.69361213566679e-06, + "loss": 0.7774, + "step": 452 + }, + { + "epoch": 0.9372413793103448, + "grad_norm": 
0.7679287791252136, + "learning_rate": 8.685482313216784e-06, + "loss": 0.7461, + "step": 453 + }, + { + "epoch": 0.9393103448275862, + "grad_norm": 0.771782398223877, + "learning_rate": 8.67733109939263e-06, + "loss": 0.7808, + "step": 454 + }, + { + "epoch": 0.9413793103448276, + "grad_norm": 0.7877687811851501, + "learning_rate": 8.669158541505817e-06, + "loss": 0.7683, + "step": 455 + }, + { + "epoch": 0.9434482758620689, + "grad_norm": 0.7960418462753296, + "learning_rate": 8.660964686991725e-06, + "loss": 0.7719, + "step": 456 + }, + { + "epoch": 0.9455172413793104, + "grad_norm": 0.761481761932373, + "learning_rate": 8.65274958340934e-06, + "loss": 0.7635, + "step": 457 + }, + { + "epoch": 0.9475862068965517, + "grad_norm": 0.846750795841217, + "learning_rate": 8.644513278440984e-06, + "loss": 0.7917, + "step": 458 + }, + { + "epoch": 0.9496551724137932, + "grad_norm": 0.8491324782371521, + "learning_rate": 8.636255819892037e-06, + "loss": 0.7826, + "step": 459 + }, + { + "epoch": 0.9517241379310345, + "grad_norm": 0.8026092648506165, + "learning_rate": 8.627977255690658e-06, + "loss": 0.7715, + "step": 460 + }, + { + "epoch": 0.9537931034482758, + "grad_norm": 0.8143620491027832, + "learning_rate": 8.61967763388751e-06, + "loss": 0.7724, + "step": 461 + }, + { + "epoch": 0.9558620689655173, + "grad_norm": 0.7983601689338684, + "learning_rate": 8.611357002655475e-06, + "loss": 0.8044, + "step": 462 + }, + { + "epoch": 0.9579310344827586, + "grad_norm": 0.9372730851173401, + "learning_rate": 8.603015410289387e-06, + "loss": 0.7635, + "step": 463 + }, + { + "epoch": 0.96, + "grad_norm": 0.809413731098175, + "learning_rate": 8.594652905205731e-06, + "loss": 0.7524, + "step": 464 + }, + { + "epoch": 0.9620689655172414, + "grad_norm": 0.78413987159729, + "learning_rate": 8.586269535942386e-06, + "loss": 0.7623, + "step": 465 + }, + { + "epoch": 0.9641379310344828, + "grad_norm": 0.8303465247154236, + "learning_rate": 8.577865351158324e-06, + "loss": 0.7832, + "step": 466 + }, + { + "epoch": 0.9662068965517241, + "grad_norm": 0.9170937538146973, + "learning_rate": 8.569440399633337e-06, + "loss": 0.7817, + "step": 467 + }, + { + "epoch": 0.9682758620689655, + "grad_norm": 0.835443377494812, + "learning_rate": 8.560994730267751e-06, + "loss": 0.7658, + "step": 468 + }, + { + "epoch": 0.9703448275862069, + "grad_norm": 0.8764263987541199, + "learning_rate": 8.552528392082147e-06, + "loss": 0.7479, + "step": 469 + }, + { + "epoch": 0.9724137931034482, + "grad_norm": 0.8065820336341858, + "learning_rate": 8.544041434217067e-06, + "loss": 0.7469, + "step": 470 + }, + { + "epoch": 0.9744827586206897, + "grad_norm": 0.895577073097229, + "learning_rate": 8.535533905932739e-06, + "loss": 0.7716, + "step": 471 + }, + { + "epoch": 0.976551724137931, + "grad_norm": 0.844038188457489, + "learning_rate": 8.52700585660878e-06, + "loss": 0.7586, + "step": 472 + }, + { + "epoch": 0.9786206896551725, + "grad_norm": 0.8391221761703491, + "learning_rate": 8.518457335743927e-06, + "loss": 0.785, + "step": 473 + }, + { + "epoch": 0.9806896551724138, + "grad_norm": 0.802529513835907, + "learning_rate": 8.509888392955725e-06, + "loss": 0.7786, + "step": 474 + }, + { + "epoch": 0.9827586206896551, + "grad_norm": 0.793171763420105, + "learning_rate": 8.50129907798026e-06, + "loss": 0.7542, + "step": 475 + }, + { + "epoch": 0.9848275862068966, + "grad_norm": 0.7531145811080933, + "learning_rate": 8.492689440671865e-06, + "loss": 0.7837, + "step": 476 + }, + { + "epoch": 0.9868965517241379, + "grad_norm": 
0.7774952054023743, + "learning_rate": 8.484059531002822e-06, + "loss": 0.7702, + "step": 477 + }, + { + "epoch": 0.9889655172413793, + "grad_norm": 0.84196537733078, + "learning_rate": 8.475409399063081e-06, + "loss": 0.7628, + "step": 478 + }, + { + "epoch": 0.9910344827586207, + "grad_norm": 0.760192334651947, + "learning_rate": 8.46673909505997e-06, + "loss": 0.7549, + "step": 479 + }, + { + "epoch": 0.993103448275862, + "grad_norm": 0.7565990686416626, + "learning_rate": 8.458048669317897e-06, + "loss": 0.7473, + "step": 480 + }, + { + "epoch": 0.9951724137931034, + "grad_norm": 0.792762041091919, + "learning_rate": 8.44933817227806e-06, + "loss": 0.7631, + "step": 481 + }, + { + "epoch": 0.9972413793103448, + "grad_norm": 0.7413747906684875, + "learning_rate": 8.440607654498156e-06, + "loss": 0.778, + "step": 482 + }, + { + "epoch": 0.9993103448275862, + "grad_norm": 0.7627434730529785, + "learning_rate": 8.43185716665209e-06, + "loss": 0.7737, + "step": 483 + }, + { + "epoch": 1.0013793103448276, + "grad_norm": 0.8035737872123718, + "learning_rate": 8.423086759529676e-06, + "loss": 0.7007, + "step": 484 + }, + { + "epoch": 1.0034482758620689, + "grad_norm": 0.7981570363044739, + "learning_rate": 8.41429648403634e-06, + "loss": 0.6635, + "step": 485 + }, + { + "epoch": 1.0055172413793103, + "grad_norm": 0.7854894995689392, + "learning_rate": 8.405486391192834e-06, + "loss": 0.6601, + "step": 486 + }, + { + "epoch": 1.0075862068965518, + "grad_norm": 0.7979158759117126, + "learning_rate": 8.396656532134934e-06, + "loss": 0.6369, + "step": 487 + }, + { + "epoch": 1.0096551724137932, + "grad_norm": 0.8349688053131104, + "learning_rate": 8.387806958113139e-06, + "loss": 0.6569, + "step": 488 + }, + { + "epoch": 1.0117241379310344, + "grad_norm": 0.8813797235488892, + "learning_rate": 8.378937720492384e-06, + "loss": 0.6697, + "step": 489 + }, + { + "epoch": 1.013793103448276, + "grad_norm": 0.8171966671943665, + "learning_rate": 8.370048870751732e-06, + "loss": 0.6545, + "step": 490 + }, + { + "epoch": 1.0158620689655173, + "grad_norm": 0.8515467047691345, + "learning_rate": 8.361140460484081e-06, + "loss": 0.6476, + "step": 491 + }, + { + "epoch": 1.0179310344827586, + "grad_norm": 0.8771365284919739, + "learning_rate": 8.352212541395859e-06, + "loss": 0.6641, + "step": 492 + }, + { + "epoch": 1.02, + "grad_norm": 0.8186706304550171, + "learning_rate": 8.343265165306736e-06, + "loss": 0.6558, + "step": 493 + }, + { + "epoch": 1.0220689655172415, + "grad_norm": 0.821365237236023, + "learning_rate": 8.334298384149305e-06, + "loss": 0.6526, + "step": 494 + }, + { + "epoch": 1.0241379310344827, + "grad_norm": 0.8323445320129395, + "learning_rate": 8.325312249968795e-06, + "loss": 0.6788, + "step": 495 + }, + { + "epoch": 1.0262068965517241, + "grad_norm": 0.7858123183250427, + "learning_rate": 8.316306814922769e-06, + "loss": 0.6476, + "step": 496 + }, + { + "epoch": 1.0282758620689656, + "grad_norm": 0.8636062741279602, + "learning_rate": 8.307282131280805e-06, + "loss": 0.6494, + "step": 497 + }, + { + "epoch": 1.0303448275862068, + "grad_norm": 0.8002017736434937, + "learning_rate": 8.298238251424216e-06, + "loss": 0.6432, + "step": 498 + }, + { + "epoch": 1.0324137931034483, + "grad_norm": 0.852461040019989, + "learning_rate": 8.289175227845727e-06, + "loss": 0.6602, + "step": 499 + }, + { + "epoch": 1.0344827586206897, + "grad_norm": 0.782200813293457, + "learning_rate": 8.280093113149182e-06, + "loss": 0.6347, + "step": 500 + }, + { + "epoch": 1.036551724137931, + "grad_norm": 
0.7889745831489563, + "learning_rate": 8.270991960049231e-06, + "loss": 0.6626, + "step": 501 + }, + { + "epoch": 1.0386206896551724, + "grad_norm": 0.7759472131729126, + "learning_rate": 8.261871821371026e-06, + "loss": 0.6374, + "step": 502 + }, + { + "epoch": 1.0406896551724139, + "grad_norm": 0.7325034737586975, + "learning_rate": 8.252732750049918e-06, + "loss": 0.6531, + "step": 503 + }, + { + "epoch": 1.042758620689655, + "grad_norm": 0.7706113457679749, + "learning_rate": 8.24357479913115e-06, + "loss": 0.6526, + "step": 504 + }, + { + "epoch": 1.0448275862068965, + "grad_norm": 0.7855322360992432, + "learning_rate": 8.234398021769541e-06, + "loss": 0.6443, + "step": 505 + }, + { + "epoch": 1.046896551724138, + "grad_norm": 0.7973263263702393, + "learning_rate": 8.225202471229187e-06, + "loss": 0.6422, + "step": 506 + }, + { + "epoch": 1.0489655172413792, + "grad_norm": 0.7677010297775269, + "learning_rate": 8.215988200883145e-06, + "loss": 0.6378, + "step": 507 + }, + { + "epoch": 1.0510344827586207, + "grad_norm": 0.8065165877342224, + "learning_rate": 8.206755264213128e-06, + "loss": 0.6291, + "step": 508 + }, + { + "epoch": 1.053103448275862, + "grad_norm": 0.7429850101470947, + "learning_rate": 8.19750371480919e-06, + "loss": 0.6671, + "step": 509 + }, + { + "epoch": 1.0551724137931036, + "grad_norm": 0.8575379252433777, + "learning_rate": 8.188233606369422e-06, + "loss": 0.6462, + "step": 510 + }, + { + "epoch": 1.0572413793103448, + "grad_norm": 0.7762051224708557, + "learning_rate": 8.178944992699632e-06, + "loss": 0.6743, + "step": 511 + }, + { + "epoch": 1.0593103448275862, + "grad_norm": 0.7541788220405579, + "learning_rate": 8.169637927713037e-06, + "loss": 0.6351, + "step": 512 + }, + { + "epoch": 1.0613793103448277, + "grad_norm": 0.7777592539787292, + "learning_rate": 8.160312465429952e-06, + "loss": 0.6506, + "step": 513 + }, + { + "epoch": 1.063448275862069, + "grad_norm": 0.7478598952293396, + "learning_rate": 8.150968659977472e-06, + "loss": 0.6781, + "step": 514 + }, + { + "epoch": 1.0655172413793104, + "grad_norm": 0.7685233950614929, + "learning_rate": 8.141606565589157e-06, + "loss": 0.6802, + "step": 515 + }, + { + "epoch": 1.0675862068965518, + "grad_norm": 0.7681663632392883, + "learning_rate": 8.132226236604728e-06, + "loss": 0.6421, + "step": 516 + }, + { + "epoch": 1.069655172413793, + "grad_norm": 0.7492855787277222, + "learning_rate": 8.122827727469737e-06, + "loss": 0.6445, + "step": 517 + }, + { + "epoch": 1.0717241379310345, + "grad_norm": 0.8009100556373596, + "learning_rate": 8.113411092735262e-06, + "loss": 0.6424, + "step": 518 + }, + { + "epoch": 1.073793103448276, + "grad_norm": 0.7607418894767761, + "learning_rate": 8.103976387057579e-06, + "loss": 0.6456, + "step": 519 + }, + { + "epoch": 1.0758620689655172, + "grad_norm": 0.805308997631073, + "learning_rate": 8.094523665197862e-06, + "loss": 0.6639, + "step": 520 + }, + { + "epoch": 1.0779310344827586, + "grad_norm": 0.7429332137107849, + "learning_rate": 8.085052982021849e-06, + "loss": 0.644, + "step": 521 + }, + { + "epoch": 1.08, + "grad_norm": 0.7760939598083496, + "learning_rate": 8.075564392499527e-06, + "loss": 0.6472, + "step": 522 + }, + { + "epoch": 1.0820689655172413, + "grad_norm": 0.8237507343292236, + "learning_rate": 8.066057951704821e-06, + "loss": 0.643, + "step": 523 + }, + { + "epoch": 1.0841379310344827, + "grad_norm": 0.7856370806694031, + "learning_rate": 8.056533714815268e-06, + "loss": 0.658, + "step": 524 + }, + { + "epoch": 1.0862068965517242, + "grad_norm": 
0.7537881731987, + "learning_rate": 8.046991737111696e-06, + "loss": 0.6441, + "step": 525 + }, + { + "epoch": 1.0882758620689654, + "grad_norm": 0.7884009480476379, + "learning_rate": 8.037432073977902e-06, + "loss": 0.6253, + "step": 526 + }, + { + "epoch": 1.0903448275862069, + "grad_norm": 0.9002606868743896, + "learning_rate": 8.027854780900339e-06, + "loss": 0.6652, + "step": 527 + }, + { + "epoch": 1.0924137931034483, + "grad_norm": 0.7838542461395264, + "learning_rate": 8.018259913467785e-06, + "loss": 0.6783, + "step": 528 + }, + { + "epoch": 1.0944827586206896, + "grad_norm": 0.74989253282547, + "learning_rate": 8.008647527371022e-06, + "loss": 0.655, + "step": 529 + }, + { + "epoch": 1.096551724137931, + "grad_norm": 0.9094736576080322, + "learning_rate": 7.999017678402518e-06, + "loss": 0.6587, + "step": 530 + }, + { + "epoch": 1.0986206896551725, + "grad_norm": 0.7564888000488281, + "learning_rate": 7.989370422456098e-06, + "loss": 0.6577, + "step": 531 + }, + { + "epoch": 1.1006896551724137, + "grad_norm": 0.806409478187561, + "learning_rate": 7.979705815526621e-06, + "loss": 0.6666, + "step": 532 + }, + { + "epoch": 1.1027586206896551, + "grad_norm": 0.7861296534538269, + "learning_rate": 7.970023913709652e-06, + "loss": 0.6849, + "step": 533 + }, + { + "epoch": 1.1048275862068966, + "grad_norm": 0.7604372501373291, + "learning_rate": 7.960324773201147e-06, + "loss": 0.6363, + "step": 534 + }, + { + "epoch": 1.106896551724138, + "grad_norm": 0.778635561466217, + "learning_rate": 7.95060845029711e-06, + "loss": 0.6551, + "step": 535 + }, + { + "epoch": 1.1089655172413793, + "grad_norm": 0.8555791974067688, + "learning_rate": 7.94087500139328e-06, + "loss": 0.6587, + "step": 536 + }, + { + "epoch": 1.1110344827586207, + "grad_norm": 0.7580198645591736, + "learning_rate": 7.931124482984802e-06, + "loss": 0.678, + "step": 537 + }, + { + "epoch": 1.1131034482758622, + "grad_norm": 0.749212384223938, + "learning_rate": 7.921356951665894e-06, + "loss": 0.6235, + "step": 538 + }, + { + "epoch": 1.1151724137931034, + "grad_norm": 0.7692828178405762, + "learning_rate": 7.911572464129515e-06, + "loss": 0.663, + "step": 539 + }, + { + "epoch": 1.1172413793103448, + "grad_norm": 0.7994730472564697, + "learning_rate": 7.901771077167051e-06, + "loss": 0.6318, + "step": 540 + }, + { + "epoch": 1.1193103448275863, + "grad_norm": 0.7523401975631714, + "learning_rate": 7.891952847667973e-06, + "loss": 0.6528, + "step": 541 + }, + { + "epoch": 1.1213793103448275, + "grad_norm": 0.7888882160186768, + "learning_rate": 7.882117832619506e-06, + "loss": 0.6238, + "step": 542 + }, + { + "epoch": 1.123448275862069, + "grad_norm": 0.7809697389602661, + "learning_rate": 7.872266089106309e-06, + "loss": 0.634, + "step": 543 + }, + { + "epoch": 1.1255172413793104, + "grad_norm": 0.7669185400009155, + "learning_rate": 7.862397674310127e-06, + "loss": 0.6938, + "step": 544 + }, + { + "epoch": 1.1275862068965516, + "grad_norm": 0.7955731749534607, + "learning_rate": 7.85251264550948e-06, + "loss": 0.6711, + "step": 545 + }, + { + "epoch": 1.129655172413793, + "grad_norm": 0.7307600378990173, + "learning_rate": 7.842611060079312e-06, + "loss": 0.6459, + "step": 546 + }, + { + "epoch": 1.1317241379310345, + "grad_norm": 0.8730869889259338, + "learning_rate": 7.832692975490668e-06, + "loss": 0.6805, + "step": 547 + }, + { + "epoch": 1.1337931034482758, + "grad_norm": 0.7648000121116638, + "learning_rate": 7.822758449310357e-06, + "loss": 0.6502, + "step": 548 + }, + { + "epoch": 1.1358620689655172, + 
"grad_norm": 0.7544475197792053, + "learning_rate": 7.812807539200622e-06, + "loss": 0.6807, + "step": 549 + }, + { + "epoch": 1.1379310344827587, + "grad_norm": 0.7903164029121399, + "learning_rate": 7.802840302918794e-06, + "loss": 0.6667, + "step": 550 + }, + { + "epoch": 1.1400000000000001, + "grad_norm": 0.7563683390617371, + "learning_rate": 7.792856798316975e-06, + "loss": 0.633, + "step": 551 + }, + { + "epoch": 1.1420689655172414, + "grad_norm": 0.8001116514205933, + "learning_rate": 7.782857083341685e-06, + "loss": 0.6297, + "step": 552 + }, + { + "epoch": 1.1441379310344828, + "grad_norm": 0.8235406279563904, + "learning_rate": 7.772841216033534e-06, + "loss": 0.6372, + "step": 553 + }, + { + "epoch": 1.1462068965517243, + "grad_norm": 0.7986905574798584, + "learning_rate": 7.762809254526884e-06, + "loss": 0.6727, + "step": 554 + }, + { + "epoch": 1.1482758620689655, + "grad_norm": 0.7706049680709839, + "learning_rate": 7.752761257049517e-06, + "loss": 0.6775, + "step": 555 + }, + { + "epoch": 1.150344827586207, + "grad_norm": 0.8530354499816895, + "learning_rate": 7.74269728192228e-06, + "loss": 0.6551, + "step": 556 + }, + { + "epoch": 1.1524137931034484, + "grad_norm": 0.8466417789459229, + "learning_rate": 7.732617387558769e-06, + "loss": 0.6418, + "step": 557 + }, + { + "epoch": 1.1544827586206896, + "grad_norm": 0.7542173862457275, + "learning_rate": 7.722521632464972e-06, + "loss": 0.6017, + "step": 558 + }, + { + "epoch": 1.156551724137931, + "grad_norm": 0.7655349373817444, + "learning_rate": 7.712410075238936e-06, + "loss": 0.6454, + "step": 559 + }, + { + "epoch": 1.1586206896551725, + "grad_norm": 0.7444957494735718, + "learning_rate": 7.702282774570434e-06, + "loss": 0.653, + "step": 560 + }, + { + "epoch": 1.1606896551724137, + "grad_norm": 0.7831370830535889, + "learning_rate": 7.692139789240611e-06, + "loss": 0.6443, + "step": 561 + }, + { + "epoch": 1.1627586206896552, + "grad_norm": 0.7594817876815796, + "learning_rate": 7.681981178121652e-06, + "loss": 0.6422, + "step": 562 + }, + { + "epoch": 1.1648275862068966, + "grad_norm": 0.7494958639144897, + "learning_rate": 7.671807000176434e-06, + "loss": 0.6217, + "step": 563 + }, + { + "epoch": 1.1668965517241379, + "grad_norm": 0.862666130065918, + "learning_rate": 7.661617314458194e-06, + "loss": 0.6465, + "step": 564 + }, + { + "epoch": 1.1689655172413793, + "grad_norm": 0.7856999635696411, + "learning_rate": 7.651412180110176e-06, + "loss": 0.6482, + "step": 565 + }, + { + "epoch": 1.1710344827586208, + "grad_norm": 0.7493500709533691, + "learning_rate": 7.641191656365288e-06, + "loss": 0.6378, + "step": 566 + }, + { + "epoch": 1.173103448275862, + "grad_norm": 0.7933719754219055, + "learning_rate": 7.630955802545768e-06, + "loss": 0.6481, + "step": 567 + }, + { + "epoch": 1.1751724137931034, + "grad_norm": 0.7814990282058716, + "learning_rate": 7.6207046780628295e-06, + "loss": 0.6434, + "step": 568 + }, + { + "epoch": 1.177241379310345, + "grad_norm": 0.7389763593673706, + "learning_rate": 7.61043834241632e-06, + "loss": 0.6241, + "step": 569 + }, + { + "epoch": 1.1793103448275861, + "grad_norm": 0.7658818364143372, + "learning_rate": 7.600156855194376e-06, + "loss": 0.6446, + "step": 570 + }, + { + "epoch": 1.1813793103448276, + "grad_norm": 0.8013659119606018, + "learning_rate": 7.589860276073082e-06, + "loss": 0.6253, + "step": 571 + }, + { + "epoch": 1.183448275862069, + "grad_norm": 0.7773886919021606, + "learning_rate": 7.579548664816112e-06, + "loss": 0.6574, + "step": 572 + }, + { + "epoch": 
1.1855172413793102, + "grad_norm": 0.8565280437469482, + "learning_rate": 7.569222081274396e-06, + "loss": 0.6781, + "step": 573 + }, + { + "epoch": 1.1875862068965517, + "grad_norm": 0.75011146068573, + "learning_rate": 7.558880585385762e-06, + "loss": 0.6628, + "step": 574 + }, + { + "epoch": 1.1896551724137931, + "grad_norm": 0.854688823223114, + "learning_rate": 7.548524237174595e-06, + "loss": 0.6442, + "step": 575 + }, + { + "epoch": 1.1917241379310344, + "grad_norm": 0.8342920541763306, + "learning_rate": 7.538153096751486e-06, + "loss": 0.6493, + "step": 576 + }, + { + "epoch": 1.1937931034482758, + "grad_norm": 0.7875855565071106, + "learning_rate": 7.527767224312883e-06, + "loss": 0.6428, + "step": 577 + }, + { + "epoch": 1.1958620689655173, + "grad_norm": 0.8200221061706543, + "learning_rate": 7.517366680140743e-06, + "loss": 0.6735, + "step": 578 + }, + { + "epoch": 1.1979310344827585, + "grad_norm": 0.8583515882492065, + "learning_rate": 7.50695152460218e-06, + "loss": 0.6432, + "step": 579 + }, + { + "epoch": 1.2, + "grad_norm": 0.7881141304969788, + "learning_rate": 7.496521818149114e-06, + "loss": 0.6455, + "step": 580 + }, + { + "epoch": 1.2020689655172414, + "grad_norm": 0.8400844931602478, + "learning_rate": 7.4860776213179264e-06, + "loss": 0.6569, + "step": 581 + }, + { + "epoch": 1.2041379310344829, + "grad_norm": 0.8359171748161316, + "learning_rate": 7.4756189947291e-06, + "loss": 0.637, + "step": 582 + }, + { + "epoch": 1.206206896551724, + "grad_norm": 0.8451258540153503, + "learning_rate": 7.465145999086874e-06, + "loss": 0.6504, + "step": 583 + }, + { + "epoch": 1.2082758620689655, + "grad_norm": 0.7891321778297424, + "learning_rate": 7.454658695178885e-06, + "loss": 0.6712, + "step": 584 + }, + { + "epoch": 1.210344827586207, + "grad_norm": 0.8851990103721619, + "learning_rate": 7.44415714387582e-06, + "loss": 0.6334, + "step": 585 + }, + { + "epoch": 1.2124137931034482, + "grad_norm": 0.9024987816810608, + "learning_rate": 7.433641406131065e-06, + "loss": 0.6604, + "step": 586 + }, + { + "epoch": 1.2144827586206897, + "grad_norm": 0.7987050414085388, + "learning_rate": 7.4231115429803405e-06, + "loss": 0.6633, + "step": 587 + }, + { + "epoch": 1.2165517241379311, + "grad_norm": 0.7416420578956604, + "learning_rate": 7.41256761554136e-06, + "loss": 0.6401, + "step": 588 + }, + { + "epoch": 1.2186206896551723, + "grad_norm": 0.8680900931358337, + "learning_rate": 7.4020096850134635e-06, + "loss": 0.6553, + "step": 589 + }, + { + "epoch": 1.2206896551724138, + "grad_norm": 0.8396124243736267, + "learning_rate": 7.391437812677274e-06, + "loss": 0.6374, + "step": 590 + }, + { + "epoch": 1.2227586206896552, + "grad_norm": 0.7313915491104126, + "learning_rate": 7.380852059894331e-06, + "loss": 0.6733, + "step": 591 + }, + { + "epoch": 1.2248275862068965, + "grad_norm": 0.8062239289283752, + "learning_rate": 7.370252488106744e-06, + "loss": 0.6697, + "step": 592 + }, + { + "epoch": 1.226896551724138, + "grad_norm": 0.8707490563392639, + "learning_rate": 7.359639158836828e-06, + "loss": 0.6517, + "step": 593 + }, + { + "epoch": 1.2289655172413794, + "grad_norm": 0.8169229030609131, + "learning_rate": 7.349012133686748e-06, + "loss": 0.6507, + "step": 594 + }, + { + "epoch": 1.2310344827586206, + "grad_norm": 0.8145736455917358, + "learning_rate": 7.3383714743381664e-06, + "loss": 0.6594, + "step": 595 + }, + { + "epoch": 1.233103448275862, + "grad_norm": 0.7559329271316528, + "learning_rate": 7.327717242551882e-06, + "loss": 0.6685, + "step": 596 + }, + { + "epoch": 
1.2351724137931035, + "grad_norm": 0.840366542339325, + "learning_rate": 7.317049500167466e-06, + "loss": 0.6152, + "step": 597 + }, + { + "epoch": 1.237241379310345, + "grad_norm": 0.8220683932304382, + "learning_rate": 7.306368309102914e-06, + "loss": 0.6615, + "step": 598 + }, + { + "epoch": 1.2393103448275862, + "grad_norm": 0.75614994764328, + "learning_rate": 7.295673731354278e-06, + "loss": 0.6451, + "step": 599 + }, + { + "epoch": 1.2413793103448276, + "grad_norm": 0.8044620156288147, + "learning_rate": 7.2849658289953084e-06, + "loss": 0.6482, + "step": 600 + }, + { + "epoch": 1.243448275862069, + "grad_norm": 0.7796791791915894, + "learning_rate": 7.2742446641770985e-06, + "loss": 0.6371, + "step": 601 + }, + { + "epoch": 1.2455172413793103, + "grad_norm": 0.739811897277832, + "learning_rate": 7.263510299127714e-06, + "loss": 0.6489, + "step": 602 + }, + { + "epoch": 1.2475862068965518, + "grad_norm": 0.7257277965545654, + "learning_rate": 7.252762796151843e-06, + "loss": 0.648, + "step": 603 + }, + { + "epoch": 1.2496551724137932, + "grad_norm": 0.7809808850288391, + "learning_rate": 7.242002217630428e-06, + "loss": 0.6584, + "step": 604 + }, + { + "epoch": 1.2517241379310344, + "grad_norm": 0.8291626572608948, + "learning_rate": 7.231228626020303e-06, + "loss": 0.652, + "step": 605 + }, + { + "epoch": 1.2537931034482759, + "grad_norm": 0.7942583560943604, + "learning_rate": 7.220442083853834e-06, + "loss": 0.6538, + "step": 606 + }, + { + "epoch": 1.2558620689655173, + "grad_norm": 0.7212340235710144, + "learning_rate": 7.209642653738555e-06, + "loss": 0.6351, + "step": 607 + }, + { + "epoch": 1.2579310344827586, + "grad_norm": 0.8261105418205261, + "learning_rate": 7.198830398356807e-06, + "loss": 0.6725, + "step": 608 + }, + { + "epoch": 1.26, + "grad_norm": 0.8113548755645752, + "learning_rate": 7.188005380465365e-06, + "loss": 0.6654, + "step": 609 + }, + { + "epoch": 1.2620689655172415, + "grad_norm": 0.76350337266922, + "learning_rate": 7.177167662895087e-06, + "loss": 0.6775, + "step": 610 + }, + { + "epoch": 1.2641379310344827, + "grad_norm": 0.6983400583267212, + "learning_rate": 7.166317308550541e-06, + "loss": 0.6494, + "step": 611 + }, + { + "epoch": 1.2662068965517241, + "grad_norm": 0.7737719416618347, + "learning_rate": 7.1554543804096425e-06, + "loss": 0.6344, + "step": 612 + }, + { + "epoch": 1.2682758620689656, + "grad_norm": 0.9658696055412292, + "learning_rate": 7.144578941523283e-06, + "loss": 0.6472, + "step": 613 + }, + { + "epoch": 1.2703448275862068, + "grad_norm": 0.7466915845870972, + "learning_rate": 7.1336910550149775e-06, + "loss": 0.6659, + "step": 614 + }, + { + "epoch": 1.2724137931034483, + "grad_norm": 0.7958390116691589, + "learning_rate": 7.122790784080482e-06, + "loss": 0.6792, + "step": 615 + }, + { + "epoch": 1.2744827586206897, + "grad_norm": 0.8669654726982117, + "learning_rate": 7.111878191987441e-06, + "loss": 0.6508, + "step": 616 + }, + { + "epoch": 1.276551724137931, + "grad_norm": 0.8021039366722107, + "learning_rate": 7.10095334207501e-06, + "loss": 0.6379, + "step": 617 + }, + { + "epoch": 1.2786206896551724, + "grad_norm": 0.8026660680770874, + "learning_rate": 7.090016297753492e-06, + "loss": 0.6554, + "step": 618 + }, + { + "epoch": 1.2806896551724138, + "grad_norm": 0.7808569669723511, + "learning_rate": 7.079067122503969e-06, + "loss": 0.6693, + "step": 619 + }, + { + "epoch": 1.282758620689655, + "grad_norm": 0.8397783637046814, + "learning_rate": 7.0681058798779335e-06, + "loss": 0.6484, + "step": 620 + }, + { + "epoch": 
1.2848275862068965, + "grad_norm": 0.9213140606880188, + "learning_rate": 7.057132633496924e-06, + "loss": 0.6603, + "step": 621 + }, + { + "epoch": 1.286896551724138, + "grad_norm": 0.7552454471588135, + "learning_rate": 7.046147447052146e-06, + "loss": 0.6536, + "step": 622 + }, + { + "epoch": 1.2889655172413792, + "grad_norm": 0.7723549604415894, + "learning_rate": 7.03515038430411e-06, + "loss": 0.6549, + "step": 623 + }, + { + "epoch": 1.2910344827586206, + "grad_norm": 0.965324878692627, + "learning_rate": 7.024141509082259e-06, + "loss": 0.6552, + "step": 624 + }, + { + "epoch": 1.293103448275862, + "grad_norm": 0.8971583843231201, + "learning_rate": 7.013120885284599e-06, + "loss": 0.6667, + "step": 625 + }, + { + "epoch": 1.2951724137931033, + "grad_norm": 0.8151547908782959, + "learning_rate": 7.002088576877325e-06, + "loss": 0.6451, + "step": 626 + }, + { + "epoch": 1.2972413793103448, + "grad_norm": 0.7296470999717712, + "learning_rate": 6.991044647894456e-06, + "loss": 0.6517, + "step": 627 + }, + { + "epoch": 1.2993103448275862, + "grad_norm": 0.9281187057495117, + "learning_rate": 6.9799891624374565e-06, + "loss": 0.6777, + "step": 628 + }, + { + "epoch": 1.3013793103448275, + "grad_norm": 0.9756149053573608, + "learning_rate": 6.968922184674868e-06, + "loss": 0.6432, + "step": 629 + }, + { + "epoch": 1.303448275862069, + "grad_norm": 0.7917487025260925, + "learning_rate": 6.957843778841937e-06, + "loss": 0.6461, + "step": 630 + }, + { + "epoch": 1.3055172413793104, + "grad_norm": 0.7548885941505432, + "learning_rate": 6.946754009240241e-06, + "loss": 0.6323, + "step": 631 + }, + { + "epoch": 1.3075862068965518, + "grad_norm": 0.8275778889656067, + "learning_rate": 6.935652940237313e-06, + "loss": 0.6651, + "step": 632 + }, + { + "epoch": 1.309655172413793, + "grad_norm": 0.8038797974586487, + "learning_rate": 6.924540636266272e-06, + "loss": 0.6631, + "step": 633 + }, + { + "epoch": 1.3117241379310345, + "grad_norm": 0.7313756346702576, + "learning_rate": 6.913417161825449e-06, + "loss": 0.6292, + "step": 634 + }, + { + "epoch": 1.313793103448276, + "grad_norm": 0.7938227653503418, + "learning_rate": 6.902282581478009e-06, + "loss": 0.643, + "step": 635 + }, + { + "epoch": 1.3158620689655172, + "grad_norm": 0.7737557291984558, + "learning_rate": 6.891136959851576e-06, + "loss": 0.6506, + "step": 636 + }, + { + "epoch": 1.3179310344827586, + "grad_norm": 0.7778476476669312, + "learning_rate": 6.879980361637865e-06, + "loss": 0.6683, + "step": 637 + }, + { + "epoch": 1.32, + "grad_norm": 0.8388012051582336, + "learning_rate": 6.868812851592299e-06, + "loss": 0.6508, + "step": 638 + }, + { + "epoch": 1.3220689655172415, + "grad_norm": 0.8127637505531311, + "learning_rate": 6.857634494533636e-06, + "loss": 0.6521, + "step": 639 + }, + { + "epoch": 1.3241379310344827, + "grad_norm": 0.718436062335968, + "learning_rate": 6.846445355343591e-06, + "loss": 0.6625, + "step": 640 + }, + { + "epoch": 1.3262068965517242, + "grad_norm": 0.7375659346580505, + "learning_rate": 6.835245498966461e-06, + "loss": 0.6293, + "step": 641 + }, + { + "epoch": 1.3282758620689656, + "grad_norm": 0.7901964783668518, + "learning_rate": 6.824034990408753e-06, + "loss": 0.6281, + "step": 642 + }, + { + "epoch": 1.3303448275862069, + "grad_norm": 0.7931618094444275, + "learning_rate": 6.8128138947387966e-06, + "loss": 0.6634, + "step": 643 + }, + { + "epoch": 1.3324137931034483, + "grad_norm": 0.7482447624206543, + "learning_rate": 6.801582277086371e-06, + "loss": 0.6446, + "step": 644 + }, + { + "epoch": 
1.3344827586206898, + "grad_norm": 0.8103066086769104, + "learning_rate": 6.790340202642333e-06, + "loss": 0.6436, + "step": 645 + }, + { + "epoch": 1.336551724137931, + "grad_norm": 0.7419809699058533, + "learning_rate": 6.7790877366582255e-06, + "loss": 0.6632, + "step": 646 + }, + { + "epoch": 1.3386206896551724, + "grad_norm": 0.7484923005104065, + "learning_rate": 6.76782494444591e-06, + "loss": 0.6609, + "step": 647 + }, + { + "epoch": 1.340689655172414, + "grad_norm": 0.7377297282218933, + "learning_rate": 6.756551891377185e-06, + "loss": 0.6557, + "step": 648 + }, + { + "epoch": 1.3427586206896551, + "grad_norm": 0.8004779815673828, + "learning_rate": 6.7452686428834045e-06, + "loss": 0.6413, + "step": 649 + }, + { + "epoch": 1.3448275862068966, + "grad_norm": 0.7940952181816101, + "learning_rate": 6.733975264455097e-06, + "loss": 0.6604, + "step": 650 + }, + { + "epoch": 1.346896551724138, + "grad_norm": 0.734728991985321, + "learning_rate": 6.722671821641591e-06, + "loss": 0.6445, + "step": 651 + }, + { + "epoch": 1.3489655172413793, + "grad_norm": 0.811326801776886, + "learning_rate": 6.711358380050628e-06, + "loss": 0.6637, + "step": 652 + }, + { + "epoch": 1.3510344827586207, + "grad_norm": 0.8057741522789001, + "learning_rate": 6.700035005347983e-06, + "loss": 0.6385, + "step": 653 + }, + { + "epoch": 1.3531034482758622, + "grad_norm": 0.7723029851913452, + "learning_rate": 6.68870176325709e-06, + "loss": 0.6514, + "step": 654 + }, + { + "epoch": 1.3551724137931034, + "grad_norm": 0.7846975326538086, + "learning_rate": 6.677358719558655e-06, + "loss": 0.6312, + "step": 655 + }, + { + "epoch": 1.3572413793103448, + "grad_norm": 0.8255680203437805, + "learning_rate": 6.666005940090271e-06, + "loss": 0.6651, + "step": 656 + }, + { + "epoch": 1.3593103448275863, + "grad_norm": 0.7784463763237, + "learning_rate": 6.654643490746042e-06, + "loss": 0.6654, + "step": 657 + }, + { + "epoch": 1.3613793103448275, + "grad_norm": 0.8567957878112793, + "learning_rate": 6.643271437476196e-06, + "loss": 0.6493, + "step": 658 + }, + { + "epoch": 1.363448275862069, + "grad_norm": 0.8170894980430603, + "learning_rate": 6.6318898462867086e-06, + "loss": 0.674, + "step": 659 + }, + { + "epoch": 1.3655172413793104, + "grad_norm": 0.777654230594635, + "learning_rate": 6.6204987832389115e-06, + "loss": 0.7003, + "step": 660 + }, + { + "epoch": 1.3675862068965516, + "grad_norm": 0.7541632056236267, + "learning_rate": 6.609098314449116e-06, + "loss": 0.6488, + "step": 661 + }, + { + "epoch": 1.369655172413793, + "grad_norm": 0.806873083114624, + "learning_rate": 6.597688506088224e-06, + "loss": 0.6484, + "step": 662 + }, + { + "epoch": 1.3717241379310345, + "grad_norm": 0.7729332447052002, + "learning_rate": 6.586269424381349e-06, + "loss": 0.63, + "step": 663 + }, + { + "epoch": 1.3737931034482758, + "grad_norm": 0.7325103282928467, + "learning_rate": 6.574841135607426e-06, + "loss": 0.6377, + "step": 664 + }, + { + "epoch": 1.3758620689655172, + "grad_norm": 0.7824835777282715, + "learning_rate": 6.563403706098833e-06, + "loss": 0.6493, + "step": 665 + }, + { + "epoch": 1.3779310344827587, + "grad_norm": 0.8269271850585938, + "learning_rate": 6.551957202241001e-06, + "loss": 0.6763, + "step": 666 + }, + { + "epoch": 1.38, + "grad_norm": 0.7548254728317261, + "learning_rate": 6.540501690472032e-06, + "loss": 0.6463, + "step": 667 + }, + { + "epoch": 1.3820689655172413, + "grad_norm": 0.728628396987915, + "learning_rate": 6.529037237282309e-06, + "loss": 0.6571, + "step": 668 + }, + { + "epoch": 
1.3841379310344828, + "grad_norm": 0.7487698793411255, + "learning_rate": 6.517563909214119e-06, + "loss": 0.6819, + "step": 669 + }, + { + "epoch": 1.386206896551724, + "grad_norm": 0.7777491807937622, + "learning_rate": 6.5060817728612544e-06, + "loss": 0.6384, + "step": 670 + }, + { + "epoch": 1.3882758620689655, + "grad_norm": 0.7405235171318054, + "learning_rate": 6.4945908948686355e-06, + "loss": 0.6637, + "step": 671 + }, + { + "epoch": 1.390344827586207, + "grad_norm": 0.730686366558075, + "learning_rate": 6.4830913419319205e-06, + "loss": 0.6264, + "step": 672 + }, + { + "epoch": 1.3924137931034481, + "grad_norm": 0.7911974191665649, + "learning_rate": 6.471583180797121e-06, + "loss": 0.6251, + "step": 673 + }, + { + "epoch": 1.3944827586206896, + "grad_norm": 0.7909592390060425, + "learning_rate": 6.460066478260209e-06, + "loss": 0.6639, + "step": 674 + }, + { + "epoch": 1.396551724137931, + "grad_norm": 0.7824447154998779, + "learning_rate": 6.448541301166737e-06, + "loss": 0.6771, + "step": 675 + }, + { + "epoch": 1.3986206896551723, + "grad_norm": 0.7654966711997986, + "learning_rate": 6.437007716411441e-06, + "loss": 0.6661, + "step": 676 + }, + { + "epoch": 1.4006896551724137, + "grad_norm": 0.7122900485992432, + "learning_rate": 6.4254657909378615e-06, + "loss": 0.6724, + "step": 677 + }, + { + "epoch": 1.4027586206896552, + "grad_norm": 0.7968724370002747, + "learning_rate": 6.4139155917379445e-06, + "loss": 0.6348, + "step": 678 + }, + { + "epoch": 1.4048275862068966, + "grad_norm": 0.7387704849243164, + "learning_rate": 6.402357185851669e-06, + "loss": 0.6537, + "step": 679 + }, + { + "epoch": 1.4068965517241379, + "grad_norm": 0.7359346747398376, + "learning_rate": 6.3907906403666355e-06, + "loss": 0.6649, + "step": 680 + }, + { + "epoch": 1.4089655172413793, + "grad_norm": 0.7153935432434082, + "learning_rate": 6.379216022417695e-06, + "loss": 0.6574, + "step": 681 + }, + { + "epoch": 1.4110344827586208, + "grad_norm": 0.7676519751548767, + "learning_rate": 6.367633399186554e-06, + "loss": 0.6811, + "step": 682 + }, + { + "epoch": 1.4131034482758622, + "grad_norm": 0.8044560551643372, + "learning_rate": 6.3560428379013795e-06, + "loss": 0.6496, + "step": 683 + }, + { + "epoch": 1.4151724137931034, + "grad_norm": 0.7096127867698669, + "learning_rate": 6.344444405836414e-06, + "loss": 0.6493, + "step": 684 + }, + { + "epoch": 1.4172413793103449, + "grad_norm": 0.7820800542831421, + "learning_rate": 6.332838170311586e-06, + "loss": 0.6561, + "step": 685 + }, + { + "epoch": 1.4193103448275863, + "grad_norm": 0.7546415328979492, + "learning_rate": 6.321224198692115e-06, + "loss": 0.672, + "step": 686 + }, + { + "epoch": 1.4213793103448276, + "grad_norm": 0.7297517657279968, + "learning_rate": 6.309602558388122e-06, + "loss": 0.6697, + "step": 687 + }, + { + "epoch": 1.423448275862069, + "grad_norm": 0.8077932000160217, + "learning_rate": 6.297973316854241e-06, + "loss": 0.6603, + "step": 688 + }, + { + "epoch": 1.4255172413793105, + "grad_norm": 0.7859988808631897, + "learning_rate": 6.286336541589224e-06, + "loss": 0.6724, + "step": 689 + }, + { + "epoch": 1.4275862068965517, + "grad_norm": 0.7754270434379578, + "learning_rate": 6.27469230013555e-06, + "loss": 0.6529, + "step": 690 + }, + { + "epoch": 1.4296551724137931, + "grad_norm": 0.7747129797935486, + "learning_rate": 6.263040660079031e-06, + "loss": 0.635, + "step": 691 + }, + { + "epoch": 1.4317241379310346, + "grad_norm": 0.7329117655754089, + "learning_rate": 6.25138168904843e-06, + "loss": 0.6489, + "step": 692 
+ }, + { + "epoch": 1.4337931034482758, + "grad_norm": 0.7887305021286011, + "learning_rate": 6.239715454715054e-06, + "loss": 0.6317, + "step": 693 + }, + { + "epoch": 1.4358620689655173, + "grad_norm": 0.7606728672981262, + "learning_rate": 6.22804202479237e-06, + "loss": 0.6604, + "step": 694 + }, + { + "epoch": 1.4379310344827587, + "grad_norm": 0.7513334155082703, + "learning_rate": 6.216361467035608e-06, + "loss": 0.6437, + "step": 695 + }, + { + "epoch": 1.44, + "grad_norm": 0.8091455698013306, + "learning_rate": 6.204673849241372e-06, + "loss": 0.6305, + "step": 696 + }, + { + "epoch": 1.4420689655172414, + "grad_norm": 0.7892200946807861, + "learning_rate": 6.192979239247243e-06, + "loss": 0.6549, + "step": 697 + }, + { + "epoch": 1.4441379310344828, + "grad_norm": 0.7235889434814453, + "learning_rate": 6.181277704931386e-06, + "loss": 0.651, + "step": 698 + }, + { + "epoch": 1.446206896551724, + "grad_norm": 0.7446338534355164, + "learning_rate": 6.169569314212157e-06, + "loss": 0.636, + "step": 699 + }, + { + "epoch": 1.4482758620689655, + "grad_norm": 0.7943323254585266, + "learning_rate": 6.1578541350477076e-06, + "loss": 0.6331, + "step": 700 + }, + { + "epoch": 1.450344827586207, + "grad_norm": 0.6900318264961243, + "learning_rate": 6.146132235435591e-06, + "loss": 0.6569, + "step": 701 + }, + { + "epoch": 1.4524137931034482, + "grad_norm": 0.7237994074821472, + "learning_rate": 6.1344036834123695e-06, + "loss": 0.6224, + "step": 702 + }, + { + "epoch": 1.4544827586206897, + "grad_norm": 0.7555676102638245, + "learning_rate": 6.1226685470532125e-06, + "loss": 0.6633, + "step": 703 + }, + { + "epoch": 1.456551724137931, + "grad_norm": 0.72216796875, + "learning_rate": 6.1109268944715125e-06, + "loss": 0.6551, + "step": 704 + }, + { + "epoch": 1.4586206896551723, + "grad_norm": 0.7122531533241272, + "learning_rate": 6.099178793818479e-06, + "loss": 0.6363, + "step": 705 + }, + { + "epoch": 1.4606896551724138, + "grad_norm": 0.7397927045822144, + "learning_rate": 6.0874243132827505e-06, + "loss": 0.6569, + "step": 706 + }, + { + "epoch": 1.4627586206896552, + "grad_norm": 0.729145348072052, + "learning_rate": 6.075663521089994e-06, + "loss": 0.628, + "step": 707 + }, + { + "epoch": 1.4648275862068965, + "grad_norm": 0.7200533151626587, + "learning_rate": 6.06389648550251e-06, + "loss": 0.6554, + "step": 708 + }, + { + "epoch": 1.466896551724138, + "grad_norm": 0.7103410959243774, + "learning_rate": 6.0521232748188416e-06, + "loss": 0.6481, + "step": 709 + }, + { + "epoch": 1.4689655172413794, + "grad_norm": 0.7074390053749084, + "learning_rate": 6.040343957373367e-06, + "loss": 0.6274, + "step": 710 + }, + { + "epoch": 1.4710344827586206, + "grad_norm": 0.734455406665802, + "learning_rate": 6.028558601535915e-06, + "loss": 0.661, + "step": 711 + }, + { + "epoch": 1.473103448275862, + "grad_norm": 0.7593621015548706, + "learning_rate": 6.016767275711359e-06, + "loss": 0.6339, + "step": 712 + }, + { + "epoch": 1.4751724137931035, + "grad_norm": 0.7968884706497192, + "learning_rate": 6.0049700483392256e-06, + "loss": 0.6564, + "step": 713 + }, + { + "epoch": 1.4772413793103447, + "grad_norm": 0.7559491991996765, + "learning_rate": 5.993166987893294e-06, + "loss": 0.6525, + "step": 714 + }, + { + "epoch": 1.4793103448275862, + "grad_norm": 0.7332967519760132, + "learning_rate": 5.981358162881202e-06, + "loss": 0.6591, + "step": 715 + }, + { + "epoch": 1.4813793103448276, + "grad_norm": 0.7570938467979431, + "learning_rate": 5.969543641844044e-06, + "loss": 0.6558, + "step": 716 + 
}, + { + "epoch": 1.4834482758620688, + "grad_norm": 0.7297980189323425, + "learning_rate": 5.957723493355977e-06, + "loss": 0.6396, + "step": 717 + }, + { + "epoch": 1.4855172413793103, + "grad_norm": 0.7381563782691956, + "learning_rate": 5.945897786023817e-06, + "loss": 0.6539, + "step": 718 + }, + { + "epoch": 1.4875862068965517, + "grad_norm": 0.7127878665924072, + "learning_rate": 5.9340665884866535e-06, + "loss": 0.6587, + "step": 719 + }, + { + "epoch": 1.489655172413793, + "grad_norm": 0.7142752408981323, + "learning_rate": 5.922229969415432e-06, + "loss": 0.6867, + "step": 720 + }, + { + "epoch": 1.4917241379310344, + "grad_norm": 0.7629202604293823, + "learning_rate": 5.910387997512573e-06, + "loss": 0.6381, + "step": 721 + }, + { + "epoch": 1.4937931034482759, + "grad_norm": 0.716674268245697, + "learning_rate": 5.898540741511564e-06, + "loss": 0.6406, + "step": 722 + }, + { + "epoch": 1.4958620689655173, + "grad_norm": 0.732208251953125, + "learning_rate": 5.8866882701765605e-06, + "loss": 0.6366, + "step": 723 + }, + { + "epoch": 1.4979310344827585, + "grad_norm": 0.7054344415664673, + "learning_rate": 5.8748306523019925e-06, + "loss": 0.6491, + "step": 724 + }, + { + "epoch": 1.5, + "grad_norm": 0.7091671228408813, + "learning_rate": 5.86296795671216e-06, + "loss": 0.6448, + "step": 725 + }, + { + "epoch": 1.5020689655172412, + "grad_norm": 0.7083839178085327, + "learning_rate": 5.851100252260835e-06, + "loss": 0.6302, + "step": 726 + }, + { + "epoch": 1.504137931034483, + "grad_norm": 0.727642834186554, + "learning_rate": 5.839227607830862e-06, + "loss": 0.6375, + "step": 727 + }, + { + "epoch": 1.5062068965517241, + "grad_norm": 0.7836138606071472, + "learning_rate": 5.827350092333758e-06, + "loss": 0.6429, + "step": 728 + }, + { + "epoch": 1.5082758620689654, + "grad_norm": 0.7098393440246582, + "learning_rate": 5.815467774709314e-06, + "loss": 0.6579, + "step": 729 + }, + { + "epoch": 1.510344827586207, + "grad_norm": 0.749321460723877, + "learning_rate": 5.803580723925193e-06, + "loss": 0.6361, + "step": 730 + }, + { + "epoch": 1.5124137931034483, + "grad_norm": 0.7477707862854004, + "learning_rate": 5.791689008976531e-06, + "loss": 0.6525, + "step": 731 + }, + { + "epoch": 1.5144827586206897, + "grad_norm": 0.7622563242912292, + "learning_rate": 5.779792698885534e-06, + "loss": 0.667, + "step": 732 + }, + { + "epoch": 1.5165517241379312, + "grad_norm": 0.7400147318840027, + "learning_rate": 5.767891862701081e-06, + "loss": 0.618, + "step": 733 + }, + { + "epoch": 1.5186206896551724, + "grad_norm": 0.7715649008750916, + "learning_rate": 5.755986569498321e-06, + "loss": 0.6501, + "step": 734 + }, + { + "epoch": 1.5206896551724138, + "grad_norm": 0.7207207679748535, + "learning_rate": 5.744076888378272e-06, + "loss": 0.6616, + "step": 735 + }, + { + "epoch": 1.5227586206896553, + "grad_norm": 0.7672625780105591, + "learning_rate": 5.732162888467421e-06, + "loss": 0.6117, + "step": 736 + }, + { + "epoch": 1.5248275862068965, + "grad_norm": 0.8607432246208191, + "learning_rate": 5.7202446389173225e-06, + "loss": 0.6272, + "step": 737 + }, + { + "epoch": 1.526896551724138, + "grad_norm": 0.8004416227340698, + "learning_rate": 5.708322208904196e-06, + "loss": 0.6538, + "step": 738 + }, + { + "epoch": 1.5289655172413794, + "grad_norm": 0.7796547412872314, + "learning_rate": 5.696395667628526e-06, + "loss": 0.6301, + "step": 739 + }, + { + "epoch": 1.5310344827586206, + "grad_norm": 0.7893610596656799, + "learning_rate": 5.6844650843146595e-06, + "loss": 0.6299, + "step": 740 + 
}, + { + "epoch": 1.533103448275862, + "grad_norm": 0.7845308184623718, + "learning_rate": 5.672530528210405e-06, + "loss": 0.6635, + "step": 741 + }, + { + "epoch": 1.5351724137931035, + "grad_norm": 0.7558560371398926, + "learning_rate": 5.660592068586629e-06, + "loss": 0.6637, + "step": 742 + }, + { + "epoch": 1.5372413793103448, + "grad_norm": 0.7075406908988953, + "learning_rate": 5.648649774736855e-06, + "loss": 0.6433, + "step": 743 + }, + { + "epoch": 1.5393103448275862, + "grad_norm": 0.7514592409133911, + "learning_rate": 5.6367037159768625e-06, + "loss": 0.6547, + "step": 744 + }, + { + "epoch": 1.5413793103448277, + "grad_norm": 0.7782887816429138, + "learning_rate": 5.624753961644281e-06, + "loss": 0.6541, + "step": 745 + }, + { + "epoch": 1.543448275862069, + "grad_norm": 0.7897704243659973, + "learning_rate": 5.612800581098193e-06, + "loss": 0.6582, + "step": 746 + }, + { + "epoch": 1.5455172413793103, + "grad_norm": 0.7592941522598267, + "learning_rate": 5.600843643718728e-06, + "loss": 0.6309, + "step": 747 + }, + { + "epoch": 1.5475862068965518, + "grad_norm": 0.7635563015937805, + "learning_rate": 5.588883218906653e-06, + "loss": 0.6407, + "step": 748 + }, + { + "epoch": 1.549655172413793, + "grad_norm": 0.7806128859519958, + "learning_rate": 5.57691937608299e-06, + "loss": 0.659, + "step": 749 + }, + { + "epoch": 1.5517241379310345, + "grad_norm": 0.7472797632217407, + "learning_rate": 5.564952184688588e-06, + "loss": 0.661, + "step": 750 + }, + { + "epoch": 1.553793103448276, + "grad_norm": 0.7442753314971924, + "learning_rate": 5.552981714183738e-06, + "loss": 0.6582, + "step": 751 + }, + { + "epoch": 1.5558620689655172, + "grad_norm": 0.7324591279029846, + "learning_rate": 5.5410080340477634e-06, + "loss": 0.6454, + "step": 752 + }, + { + "epoch": 1.5579310344827586, + "grad_norm": 0.7217804789543152, + "learning_rate": 5.529031213778615e-06, + "loss": 0.6441, + "step": 753 + }, + { + "epoch": 1.56, + "grad_norm": 0.7129271626472473, + "learning_rate": 5.517051322892468e-06, + "loss": 0.6642, + "step": 754 + }, + { + "epoch": 1.5620689655172413, + "grad_norm": 0.6992490291595459, + "learning_rate": 5.505068430923327e-06, + "loss": 0.6382, + "step": 755 + }, + { + "epoch": 1.5641379310344827, + "grad_norm": 0.7824325561523438, + "learning_rate": 5.4930826074226085e-06, + "loss": 0.6657, + "step": 756 + }, + { + "epoch": 1.5662068965517242, + "grad_norm": 0.7302783727645874, + "learning_rate": 5.481093921958749e-06, + "loss": 0.6643, + "step": 757 + }, + { + "epoch": 1.5682758620689654, + "grad_norm": 0.7289795875549316, + "learning_rate": 5.469102444116791e-06, + "loss": 0.6429, + "step": 758 + }, + { + "epoch": 1.5703448275862069, + "grad_norm": 0.7278138995170593, + "learning_rate": 5.457108243497993e-06, + "loss": 0.6518, + "step": 759 + }, + { + "epoch": 1.5724137931034483, + "grad_norm": 0.7361022233963013, + "learning_rate": 5.445111389719408e-06, + "loss": 0.6445, + "step": 760 + }, + { + "epoch": 1.5744827586206895, + "grad_norm": 0.7398816347122192, + "learning_rate": 5.433111952413496e-06, + "loss": 0.6601, + "step": 761 + }, + { + "epoch": 1.576551724137931, + "grad_norm": 0.7518286108970642, + "learning_rate": 5.421110001227705e-06, + "loss": 0.6119, + "step": 762 + }, + { + "epoch": 1.5786206896551724, + "grad_norm": 0.766607403755188, + "learning_rate": 5.409105605824082e-06, + "loss": 0.6555, + "step": 763 + }, + { + "epoch": 1.5806896551724137, + "grad_norm": 0.7912081480026245, + "learning_rate": 5.3970988358788565e-06, + "loss": 0.6652, + "step": 764 
+ }, + { + "epoch": 1.5827586206896553, + "grad_norm": 0.7829515337944031, + "learning_rate": 5.385089761082039e-06, + "loss": 0.6272, + "step": 765 + }, + { + "epoch": 1.5848275862068966, + "grad_norm": 0.7310935854911804, + "learning_rate": 5.3730784511370204e-06, + "loss": 0.6714, + "step": 766 + }, + { + "epoch": 1.5868965517241378, + "grad_norm": 0.7284607291221619, + "learning_rate": 5.361064975760166e-06, + "loss": 0.6563, + "step": 767 + }, + { + "epoch": 1.5889655172413795, + "grad_norm": 0.7712138891220093, + "learning_rate": 5.349049404680407e-06, + "loss": 0.6611, + "step": 768 + }, + { + "epoch": 1.5910344827586207, + "grad_norm": 0.7651541233062744, + "learning_rate": 5.3370318076388405e-06, + "loss": 0.6278, + "step": 769 + }, + { + "epoch": 1.593103448275862, + "grad_norm": 0.7919555902481079, + "learning_rate": 5.32501225438832e-06, + "loss": 0.6497, + "step": 770 + }, + { + "epoch": 1.5951724137931036, + "grad_norm": 0.6936086416244507, + "learning_rate": 5.3129908146930565e-06, + "loss": 0.6327, + "step": 771 + }, + { + "epoch": 1.5972413793103448, + "grad_norm": 0.7541401386260986, + "learning_rate": 5.300967558328208e-06, + "loss": 0.6141, + "step": 772 + }, + { + "epoch": 1.599310344827586, + "grad_norm": 0.785559892654419, + "learning_rate": 5.288942555079479e-06, + "loss": 0.6763, + "step": 773 + }, + { + "epoch": 1.6013793103448277, + "grad_norm": 0.7877768874168396, + "learning_rate": 5.2769158747427115e-06, + "loss": 0.6647, + "step": 774 + }, + { + "epoch": 1.603448275862069, + "grad_norm": 0.7559152245521545, + "learning_rate": 5.264887587123483e-06, + "loss": 0.6589, + "step": 775 + }, + { + "epoch": 1.6055172413793104, + "grad_norm": 0.7801457643508911, + "learning_rate": 5.2528577620366986e-06, + "loss": 0.6643, + "step": 776 + }, + { + "epoch": 1.6075862068965519, + "grad_norm": 0.7516634464263916, + "learning_rate": 5.240826469306187e-06, + "loss": 0.6471, + "step": 777 + }, + { + "epoch": 1.609655172413793, + "grad_norm": 0.7641648054122925, + "learning_rate": 5.228793778764297e-06, + "loss": 0.6796, + "step": 778 + }, + { + "epoch": 1.6117241379310345, + "grad_norm": 0.7582610845565796, + "learning_rate": 5.216759760251493e-06, + "loss": 0.6602, + "step": 779 + }, + { + "epoch": 1.613793103448276, + "grad_norm": 0.7532828450202942, + "learning_rate": 5.204724483615941e-06, + "loss": 0.6594, + "step": 780 + }, + { + "epoch": 1.6158620689655172, + "grad_norm": 0.774940550327301, + "learning_rate": 5.1926880187131134e-06, + "loss": 0.656, + "step": 781 + }, + { + "epoch": 1.6179310344827587, + "grad_norm": 0.7041785717010498, + "learning_rate": 5.18065043540538e-06, + "loss": 0.645, + "step": 782 + }, + { + "epoch": 1.62, + "grad_norm": 0.7526170611381531, + "learning_rate": 5.168611803561599e-06, + "loss": 0.658, + "step": 783 + }, + { + "epoch": 1.6220689655172413, + "grad_norm": 0.8174700736999512, + "learning_rate": 5.156572193056718e-06, + "loss": 0.6502, + "step": 784 + }, + { + "epoch": 1.6241379310344828, + "grad_norm": 0.7578688263893127, + "learning_rate": 5.144531673771364e-06, + "loss": 0.6664, + "step": 785 + }, + { + "epoch": 1.6262068965517242, + "grad_norm": 0.7004210948944092, + "learning_rate": 5.132490315591437e-06, + "loss": 0.6351, + "step": 786 + }, + { + "epoch": 1.6282758620689655, + "grad_norm": 0.7185120582580566, + "learning_rate": 5.1204481884077075e-06, + "loss": 0.6148, + "step": 787 + }, + { + "epoch": 1.630344827586207, + "grad_norm": 0.8190487623214722, + "learning_rate": 5.10840536211541e-06, + "loss": 0.6241, + "step": 788 
+ }, + { + "epoch": 1.6324137931034484, + "grad_norm": 0.8276094198226929, + "learning_rate": 5.096361906613836e-06, + "loss": 0.6241, + "step": 789 + }, + { + "epoch": 1.6344827586206896, + "grad_norm": 0.7432799339294434, + "learning_rate": 5.084317891805928e-06, + "loss": 0.6408, + "step": 790 + }, + { + "epoch": 1.636551724137931, + "grad_norm": 0.7277430891990662, + "learning_rate": 5.072273387597877e-06, + "loss": 0.6406, + "step": 791 + }, + { + "epoch": 1.6386206896551725, + "grad_norm": 0.7360126972198486, + "learning_rate": 5.0602284638987145e-06, + "loss": 0.6422, + "step": 792 + }, + { + "epoch": 1.6406896551724137, + "grad_norm": 0.8584643006324768, + "learning_rate": 5.048183190619904e-06, + "loss": 0.6329, + "step": 793 + }, + { + "epoch": 1.6427586206896552, + "grad_norm": 0.7649242281913757, + "learning_rate": 5.036137637674943e-06, + "loss": 0.6356, + "step": 794 + }, + { + "epoch": 1.6448275862068966, + "grad_norm": 0.7129114270210266, + "learning_rate": 5.0240918749789455e-06, + "loss": 0.6334, + "step": 795 + }, + { + "epoch": 1.6468965517241378, + "grad_norm": 0.7862198352813721, + "learning_rate": 5.01204597244825e-06, + "loss": 0.675, + "step": 796 + }, + { + "epoch": 1.6489655172413793, + "grad_norm": 0.7846953272819519, + "learning_rate": 5e-06, + "loss": 0.6573, + "step": 797 + }, + { + "epoch": 1.6510344827586207, + "grad_norm": 0.7969672083854675, + "learning_rate": 4.987954027551751e-06, + "loss": 0.652, + "step": 798 + }, + { + "epoch": 1.653103448275862, + "grad_norm": 0.6976673007011414, + "learning_rate": 4.975908125021055e-06, + "loss": 0.6251, + "step": 799 + }, + { + "epoch": 1.6551724137931034, + "grad_norm": 0.7470307946205139, + "learning_rate": 4.963862362325058e-06, + "loss": 0.6489, + "step": 800 + }, + { + "epoch": 1.6572413793103449, + "grad_norm": 0.7507964372634888, + "learning_rate": 4.951816809380098e-06, + "loss": 0.6355, + "step": 801 + }, + { + "epoch": 1.659310344827586, + "grad_norm": 0.7489534616470337, + "learning_rate": 4.939771536101286e-06, + "loss": 0.6555, + "step": 802 + }, + { + "epoch": 1.6613793103448276, + "grad_norm": 0.7056592106819153, + "learning_rate": 4.9277266124021245e-06, + "loss": 0.6497, + "step": 803 + }, + { + "epoch": 1.663448275862069, + "grad_norm": 0.7045999765396118, + "learning_rate": 4.915682108194073e-06, + "loss": 0.6547, + "step": 804 + }, + { + "epoch": 1.6655172413793102, + "grad_norm": 0.7271073460578918, + "learning_rate": 4.903638093386167e-06, + "loss": 0.6267, + "step": 805 + }, + { + "epoch": 1.6675862068965517, + "grad_norm": 0.7735942602157593, + "learning_rate": 4.891594637884591e-06, + "loss": 0.6369, + "step": 806 + }, + { + "epoch": 1.6696551724137931, + "grad_norm": 0.7303586006164551, + "learning_rate": 4.879551811592295e-06, + "loss": 0.6438, + "step": 807 + }, + { + "epoch": 1.6717241379310344, + "grad_norm": 0.734630286693573, + "learning_rate": 4.867509684408564e-06, + "loss": 0.6273, + "step": 808 + }, + { + "epoch": 1.6737931034482758, + "grad_norm": 0.7351275682449341, + "learning_rate": 4.855468326228638e-06, + "loss": 0.6628, + "step": 809 + }, + { + "epoch": 1.6758620689655173, + "grad_norm": 0.7179361581802368, + "learning_rate": 4.843427806943283e-06, + "loss": 0.6422, + "step": 810 + }, + { + "epoch": 1.6779310344827585, + "grad_norm": 0.6784632802009583, + "learning_rate": 4.831388196438402e-06, + "loss": 0.6407, + "step": 811 + }, + { + "epoch": 1.6800000000000002, + "grad_norm": 0.73552006483078, + "learning_rate": 4.819349564594622e-06, + "loss": 0.6448, + "step": 812 + 
}, + { + "epoch": 1.6820689655172414, + "grad_norm": 0.7421280145645142, + "learning_rate": 4.807311981286888e-06, + "loss": 0.6621, + "step": 813 + }, + { + "epoch": 1.6841379310344826, + "grad_norm": 0.6813828349113464, + "learning_rate": 4.79527551638406e-06, + "loss": 0.6442, + "step": 814 + }, + { + "epoch": 1.6862068965517243, + "grad_norm": 0.7029781937599182, + "learning_rate": 4.783240239748509e-06, + "loss": 0.6351, + "step": 815 + }, + { + "epoch": 1.6882758620689655, + "grad_norm": 0.6841586828231812, + "learning_rate": 4.7712062212357045e-06, + "loss": 0.6373, + "step": 816 + }, + { + "epoch": 1.6903448275862067, + "grad_norm": 0.7366838455200195, + "learning_rate": 4.7591735306938144e-06, + "loss": 0.6733, + "step": 817 + }, + { + "epoch": 1.6924137931034484, + "grad_norm": 0.7882569432258606, + "learning_rate": 4.747142237963305e-06, + "loss": 0.6266, + "step": 818 + }, + { + "epoch": 1.6944827586206896, + "grad_norm": 0.6938716173171997, + "learning_rate": 4.7351124128765185e-06, + "loss": 0.6318, + "step": 819 + }, + { + "epoch": 1.6965517241379309, + "grad_norm": 0.7524465918540955, + "learning_rate": 4.723084125257291e-06, + "loss": 0.6437, + "step": 820 + }, + { + "epoch": 1.6986206896551725, + "grad_norm": 0.7383887767791748, + "learning_rate": 4.711057444920522e-06, + "loss": 0.6525, + "step": 821 + }, + { + "epoch": 1.7006896551724138, + "grad_norm": 0.7824906706809998, + "learning_rate": 4.699032441671794e-06, + "loss": 0.6458, + "step": 822 + }, + { + "epoch": 1.7027586206896552, + "grad_norm": 0.7252950072288513, + "learning_rate": 4.687009185306945e-06, + "loss": 0.6536, + "step": 823 + }, + { + "epoch": 1.7048275862068967, + "grad_norm": 0.7240720391273499, + "learning_rate": 4.67498774561168e-06, + "loss": 0.6583, + "step": 824 + }, + { + "epoch": 1.706896551724138, + "grad_norm": 0.739566445350647, + "learning_rate": 4.662968192361161e-06, + "loss": 0.6758, + "step": 825 + }, + { + "epoch": 1.7089655172413794, + "grad_norm": 0.7385093569755554, + "learning_rate": 4.650950595319593e-06, + "loss": 0.6273, + "step": 826 + }, + { + "epoch": 1.7110344827586208, + "grad_norm": 0.7286804914474487, + "learning_rate": 4.6389350242398354e-06, + "loss": 0.6436, + "step": 827 + }, + { + "epoch": 1.713103448275862, + "grad_norm": 0.7011599540710449, + "learning_rate": 4.6269215488629795e-06, + "loss": 0.6532, + "step": 828 + }, + { + "epoch": 1.7151724137931035, + "grad_norm": 0.7191659212112427, + "learning_rate": 4.614910238917963e-06, + "loss": 0.6544, + "step": 829 + }, + { + "epoch": 1.717241379310345, + "grad_norm": 0.7047889232635498, + "learning_rate": 4.602901164121145e-06, + "loss": 0.6421, + "step": 830 + }, + { + "epoch": 1.7193103448275862, + "grad_norm": 0.7209073901176453, + "learning_rate": 4.59089439417592e-06, + "loss": 0.6289, + "step": 831 + }, + { + "epoch": 1.7213793103448276, + "grad_norm": 0.7176074385643005, + "learning_rate": 4.578889998772296e-06, + "loss": 0.642, + "step": 832 + }, + { + "epoch": 1.723448275862069, + "grad_norm": 0.7749189138412476, + "learning_rate": 4.5668880475865074e-06, + "loss": 0.6755, + "step": 833 + }, + { + "epoch": 1.7255172413793103, + "grad_norm": 0.689206600189209, + "learning_rate": 4.554888610280593e-06, + "loss": 0.6552, + "step": 834 + }, + { + "epoch": 1.7275862068965517, + "grad_norm": 0.7584705352783203, + "learning_rate": 4.542891756502008e-06, + "loss": 0.6109, + "step": 835 + }, + { + "epoch": 1.7296551724137932, + "grad_norm": 0.7560999393463135, + "learning_rate": 4.5308975558832095e-06, + "loss": 
0.6619, + "step": 836 + }, + { + "epoch": 1.7317241379310344, + "grad_norm": 0.8325402140617371, + "learning_rate": 4.518906078041252e-06, + "loss": 0.6344, + "step": 837 + }, + { + "epoch": 1.7337931034482759, + "grad_norm": 0.7222752571105957, + "learning_rate": 4.506917392577393e-06, + "loss": 0.6255, + "step": 838 + }, + { + "epoch": 1.7358620689655173, + "grad_norm": 0.7850416898727417, + "learning_rate": 4.494931569076674e-06, + "loss": 0.6213, + "step": 839 + }, + { + "epoch": 1.7379310344827585, + "grad_norm": 0.7065498232841492, + "learning_rate": 4.482948677107533e-06, + "loss": 0.6146, + "step": 840 + }, + { + "epoch": 1.74, + "grad_norm": 0.6791179776191711, + "learning_rate": 4.4709687862213866e-06, + "loss": 0.6461, + "step": 841 + }, + { + "epoch": 1.7420689655172414, + "grad_norm": 0.750182032585144, + "learning_rate": 4.458991965952238e-06, + "loss": 0.652, + "step": 842 + }, + { + "epoch": 1.7441379310344827, + "grad_norm": 0.700097382068634, + "learning_rate": 4.447018285816263e-06, + "loss": 0.6437, + "step": 843 + }, + { + "epoch": 1.7462068965517241, + "grad_norm": 0.7411333322525024, + "learning_rate": 4.435047815311414e-06, + "loss": 0.6502, + "step": 844 + }, + { + "epoch": 1.7482758620689656, + "grad_norm": 0.7353593707084656, + "learning_rate": 4.423080623917012e-06, + "loss": 0.6502, + "step": 845 + }, + { + "epoch": 1.7503448275862068, + "grad_norm": 0.8049408793449402, + "learning_rate": 4.411116781093348e-06, + "loss": 0.6482, + "step": 846 + }, + { + "epoch": 1.7524137931034482, + "grad_norm": 0.7221590876579285, + "learning_rate": 4.399156356281274e-06, + "loss": 0.6549, + "step": 847 + }, + { + "epoch": 1.7544827586206897, + "grad_norm": 0.7125415802001953, + "learning_rate": 4.3871994189018075e-06, + "loss": 0.6409, + "step": 848 + }, + { + "epoch": 1.756551724137931, + "grad_norm": 0.7120684385299683, + "learning_rate": 4.3752460383557195e-06, + "loss": 0.6476, + "step": 849 + }, + { + "epoch": 1.7586206896551724, + "grad_norm": 0.7340549230575562, + "learning_rate": 4.363296284023139e-06, + "loss": 0.6606, + "step": 850 + }, + { + "epoch": 1.7606896551724138, + "grad_norm": 0.8672581911087036, + "learning_rate": 4.351350225263147e-06, + "loss": 0.6447, + "step": 851 + }, + { + "epoch": 1.762758620689655, + "grad_norm": 0.7489596009254456, + "learning_rate": 4.339407931413372e-06, + "loss": 0.634, + "step": 852 + }, + { + "epoch": 1.7648275862068965, + "grad_norm": 0.7057939171791077, + "learning_rate": 4.327469471789597e-06, + "loss": 0.6683, + "step": 853 + }, + { + "epoch": 1.766896551724138, + "grad_norm": 0.7291073799133301, + "learning_rate": 4.315534915685341e-06, + "loss": 0.6766, + "step": 854 + }, + { + "epoch": 1.7689655172413792, + "grad_norm": 0.7649169564247131, + "learning_rate": 4.303604332371476e-06, + "loss": 0.649, + "step": 855 + }, + { + "epoch": 1.7710344827586209, + "grad_norm": 0.7476722002029419, + "learning_rate": 4.291677791095805e-06, + "loss": 0.6111, + "step": 856 + }, + { + "epoch": 1.773103448275862, + "grad_norm": 0.7485122680664062, + "learning_rate": 4.27975536108268e-06, + "loss": 0.6375, + "step": 857 + }, + { + "epoch": 1.7751724137931033, + "grad_norm": 0.7042410373687744, + "learning_rate": 4.26783711153258e-06, + "loss": 0.6406, + "step": 858 + }, + { + "epoch": 1.777241379310345, + "grad_norm": 0.7430243492126465, + "learning_rate": 4.2559231116217305e-06, + "loss": 0.6511, + "step": 859 + }, + { + "epoch": 1.7793103448275862, + "grad_norm": 0.6917579174041748, + "learning_rate": 4.244013430501681e-06, + "loss": 
0.6316, + "step": 860 + }, + { + "epoch": 1.7813793103448274, + "grad_norm": 0.6834431290626526, + "learning_rate": 4.2321081372989195e-06, + "loss": 0.6413, + "step": 861 + }, + { + "epoch": 1.783448275862069, + "grad_norm": 0.6975676417350769, + "learning_rate": 4.220207301114467e-06, + "loss": 0.6331, + "step": 862 + }, + { + "epoch": 1.7855172413793103, + "grad_norm": 0.7284821271896362, + "learning_rate": 4.208310991023469e-06, + "loss": 0.6431, + "step": 863 + }, + { + "epoch": 1.7875862068965516, + "grad_norm": 0.742372453212738, + "learning_rate": 4.1964192760748085e-06, + "loss": 0.6307, + "step": 864 + }, + { + "epoch": 1.7896551724137932, + "grad_norm": 0.7629566788673401, + "learning_rate": 4.184532225290687e-06, + "loss": 0.6186, + "step": 865 + }, + { + "epoch": 1.7917241379310345, + "grad_norm": 0.7036967277526855, + "learning_rate": 4.172649907666244e-06, + "loss": 0.6212, + "step": 866 + }, + { + "epoch": 1.793793103448276, + "grad_norm": 0.6951998472213745, + "learning_rate": 4.16077239216914e-06, + "loss": 0.6023, + "step": 867 + }, + { + "epoch": 1.7958620689655174, + "grad_norm": 0.728022575378418, + "learning_rate": 4.148899747739168e-06, + "loss": 0.6444, + "step": 868 + }, + { + "epoch": 1.7979310344827586, + "grad_norm": 0.6999719142913818, + "learning_rate": 4.137032043287841e-06, + "loss": 0.6368, + "step": 869 + }, + { + "epoch": 1.8, + "grad_norm": 0.7263539433479309, + "learning_rate": 4.125169347698009e-06, + "loss": 0.6584, + "step": 870 + }, + { + "epoch": 1.8020689655172415, + "grad_norm": 0.7751326560974121, + "learning_rate": 4.11331172982344e-06, + "loss": 0.6245, + "step": 871 + }, + { + "epoch": 1.8041379310344827, + "grad_norm": 0.7183523178100586, + "learning_rate": 4.101459258488438e-06, + "loss": 0.5938, + "step": 872 + }, + { + "epoch": 1.8062068965517242, + "grad_norm": 0.7222279906272888, + "learning_rate": 4.089612002487428e-06, + "loss": 0.6162, + "step": 873 + }, + { + "epoch": 1.8082758620689656, + "grad_norm": 0.7456408143043518, + "learning_rate": 4.077770030584569e-06, + "loss": 0.6602, + "step": 874 + }, + { + "epoch": 1.8103448275862069, + "grad_norm": 0.7470499873161316, + "learning_rate": 4.065933411513349e-06, + "loss": 0.653, + "step": 875 + }, + { + "epoch": 1.8124137931034483, + "grad_norm": 0.7037544846534729, + "learning_rate": 4.054102213976184e-06, + "loss": 0.6216, + "step": 876 + }, + { + "epoch": 1.8144827586206898, + "grad_norm": 0.7381490468978882, + "learning_rate": 4.042276506644024e-06, + "loss": 0.6422, + "step": 877 + }, + { + "epoch": 1.816551724137931, + "grad_norm": 0.7278012037277222, + "learning_rate": 4.030456358155957e-06, + "loss": 0.6475, + "step": 878 + }, + { + "epoch": 1.8186206896551724, + "grad_norm": 0.7335582971572876, + "learning_rate": 4.018641837118799e-06, + "loss": 0.643, + "step": 879 + }, + { + "epoch": 1.8206896551724139, + "grad_norm": 0.6921178698539734, + "learning_rate": 4.006833012106707e-06, + "loss": 0.6241, + "step": 880 + }, + { + "epoch": 1.822758620689655, + "grad_norm": 0.7078073024749756, + "learning_rate": 3.995029951660777e-06, + "loss": 0.6295, + "step": 881 + }, + { + "epoch": 1.8248275862068966, + "grad_norm": 0.6960477232933044, + "learning_rate": 3.983232724288642e-06, + "loss": 0.6596, + "step": 882 + }, + { + "epoch": 1.826896551724138, + "grad_norm": 0.7017378211021423, + "learning_rate": 3.971441398464088e-06, + "loss": 0.611, + "step": 883 + }, + { + "epoch": 1.8289655172413792, + "grad_norm": 0.729068398475647, + "learning_rate": 3.959656042626634e-06, + "loss": 
0.6184, + "step": 884 + }, + { + "epoch": 1.8310344827586207, + "grad_norm": 0.7174490690231323, + "learning_rate": 3.94787672518116e-06, + "loss": 0.6383, + "step": 885 + }, + { + "epoch": 1.8331034482758621, + "grad_norm": 0.777277946472168, + "learning_rate": 3.9361035144974905e-06, + "loss": 0.6508, + "step": 886 + }, + { + "epoch": 1.8351724137931034, + "grad_norm": 0.7446042895317078, + "learning_rate": 3.924336478910007e-06, + "loss": 0.6588, + "step": 887 + }, + { + "epoch": 1.8372413793103448, + "grad_norm": 0.7238491773605347, + "learning_rate": 3.912575686717251e-06, + "loss": 0.6394, + "step": 888 + }, + { + "epoch": 1.8393103448275863, + "grad_norm": 0.7243683338165283, + "learning_rate": 3.900821206181521e-06, + "loss": 0.6529, + "step": 889 + }, + { + "epoch": 1.8413793103448275, + "grad_norm": 0.719829261302948, + "learning_rate": 3.889073105528489e-06, + "loss": 0.6535, + "step": 890 + }, + { + "epoch": 1.843448275862069, + "grad_norm": 0.7592584490776062, + "learning_rate": 3.8773314529467875e-06, + "loss": 0.6391, + "step": 891 + }, + { + "epoch": 1.8455172413793104, + "grad_norm": 0.7423802018165588, + "learning_rate": 3.865596316587633e-06, + "loss": 0.643, + "step": 892 + }, + { + "epoch": 1.8475862068965516, + "grad_norm": 0.7308037281036377, + "learning_rate": 3.853867764564409e-06, + "loss": 0.6329, + "step": 893 + }, + { + "epoch": 1.849655172413793, + "grad_norm": 0.7450445294380188, + "learning_rate": 3.842145864952295e-06, + "loss": 0.6516, + "step": 894 + }, + { + "epoch": 1.8517241379310345, + "grad_norm": 0.8020148277282715, + "learning_rate": 3.830430685787844e-06, + "loss": 0.6378, + "step": 895 + }, + { + "epoch": 1.8537931034482757, + "grad_norm": 0.6937201023101807, + "learning_rate": 3.818722295068616e-06, + "loss": 0.646, + "step": 896 + }, + { + "epoch": 1.8558620689655172, + "grad_norm": 0.7302286028862, + "learning_rate": 3.8070207607527587e-06, + "loss": 0.6489, + "step": 897 + }, + { + "epoch": 1.8579310344827586, + "grad_norm": 0.7561166286468506, + "learning_rate": 3.7953261507586282e-06, + "loss": 0.6249, + "step": 898 + }, + { + "epoch": 1.8599999999999999, + "grad_norm": 0.6970481276512146, + "learning_rate": 3.7836385329643933e-06, + "loss": 0.6355, + "step": 899 + }, + { + "epoch": 1.8620689655172413, + "grad_norm": 0.7228543162345886, + "learning_rate": 3.77195797520763e-06, + "loss": 0.6435, + "step": 900 + }, + { + "epoch": 1.8641379310344828, + "grad_norm": 0.7981168627738953, + "learning_rate": 3.760284545284947e-06, + "loss": 0.6294, + "step": 901 + }, + { + "epoch": 1.866206896551724, + "grad_norm": 0.758095383644104, + "learning_rate": 3.7486183109515696e-06, + "loss": 0.6652, + "step": 902 + }, + { + "epoch": 1.8682758620689657, + "grad_norm": 0.7205570936203003, + "learning_rate": 3.7369593399209704e-06, + "loss": 0.6236, + "step": 903 + }, + { + "epoch": 1.870344827586207, + "grad_norm": 0.7436397075653076, + "learning_rate": 3.725307699864452e-06, + "loss": 0.639, + "step": 904 + }, + { + "epoch": 1.8724137931034481, + "grad_norm": 0.7406452298164368, + "learning_rate": 3.7136634584107787e-06, + "loss": 0.6274, + "step": 905 + }, + { + "epoch": 1.8744827586206898, + "grad_norm": 0.7269127368927002, + "learning_rate": 3.7020266831457598e-06, + "loss": 0.6544, + "step": 906 + }, + { + "epoch": 1.876551724137931, + "grad_norm": 0.732607901096344, + "learning_rate": 3.6903974416118786e-06, + "loss": 0.6784, + "step": 907 + }, + { + "epoch": 1.8786206896551723, + "grad_norm": 0.7445371150970459, + "learning_rate": 
3.6787758013078863e-06, + "loss": 0.6298, + "step": 908 + }, + { + "epoch": 1.880689655172414, + "grad_norm": 0.7168945670127869, + "learning_rate": 3.6671618296884147e-06, + "loss": 0.6331, + "step": 909 + }, + { + "epoch": 1.8827586206896552, + "grad_norm": 0.7478596568107605, + "learning_rate": 3.655555594163587e-06, + "loss": 0.6396, + "step": 910 + }, + { + "epoch": 1.8848275862068964, + "grad_norm": 0.7174217700958252, + "learning_rate": 3.6439571620986213e-06, + "loss": 0.6619, + "step": 911 + }, + { + "epoch": 1.886896551724138, + "grad_norm": 0.7665561437606812, + "learning_rate": 3.6323666008134465e-06, + "loss": 0.6513, + "step": 912 + }, + { + "epoch": 1.8889655172413793, + "grad_norm": 0.6955576539039612, + "learning_rate": 3.620783977582305e-06, + "loss": 0.6409, + "step": 913 + }, + { + "epoch": 1.8910344827586207, + "grad_norm": 0.6954272389411926, + "learning_rate": 3.6092093596333654e-06, + "loss": 0.6423, + "step": 914 + }, + { + "epoch": 1.8931034482758622, + "grad_norm": 0.744321882724762, + "learning_rate": 3.5976428141483326e-06, + "loss": 0.6142, + "step": 915 + }, + { + "epoch": 1.8951724137931034, + "grad_norm": 0.7438650131225586, + "learning_rate": 3.586084408262056e-06, + "loss": 0.6602, + "step": 916 + }, + { + "epoch": 1.8972413793103449, + "grad_norm": 0.6955951452255249, + "learning_rate": 3.5745342090621406e-06, + "loss": 0.6311, + "step": 917 + }, + { + "epoch": 1.8993103448275863, + "grad_norm": 0.6816698312759399, + "learning_rate": 3.5629922835885618e-06, + "loss": 0.6514, + "step": 918 + }, + { + "epoch": 1.9013793103448275, + "grad_norm": 0.6610486507415771, + "learning_rate": 3.551458698833265e-06, + "loss": 0.6401, + "step": 919 + }, + { + "epoch": 1.903448275862069, + "grad_norm": 0.7027138471603394, + "learning_rate": 3.5399335217397936e-06, + "loss": 0.6452, + "step": 920 + }, + { + "epoch": 1.9055172413793104, + "grad_norm": 0.715492844581604, + "learning_rate": 3.528416819202881e-06, + "loss": 0.6386, + "step": 921 + }, + { + "epoch": 1.9075862068965517, + "grad_norm": 0.7160024642944336, + "learning_rate": 3.5169086580680824e-06, + "loss": 0.6416, + "step": 922 + }, + { + "epoch": 1.9096551724137931, + "grad_norm": 0.7222967743873596, + "learning_rate": 3.5054091051313666e-06, + "loss": 0.6377, + "step": 923 + }, + { + "epoch": 1.9117241379310346, + "grad_norm": 0.6950160264968872, + "learning_rate": 3.493918227138746e-06, + "loss": 0.6458, + "step": 924 + }, + { + "epoch": 1.9137931034482758, + "grad_norm": 0.6922501921653748, + "learning_rate": 3.4824360907858824e-06, + "loss": 0.6528, + "step": 925 + }, + { + "epoch": 1.9158620689655173, + "grad_norm": 0.7411875128746033, + "learning_rate": 3.47096276271769e-06, + "loss": 0.6178, + "step": 926 + }, + { + "epoch": 1.9179310344827587, + "grad_norm": 0.6832643151283264, + "learning_rate": 3.45949830952797e-06, + "loss": 0.6365, + "step": 927 + }, + { + "epoch": 1.92, + "grad_norm": 0.7130672931671143, + "learning_rate": 3.4480427977590004e-06, + "loss": 0.6319, + "step": 928 + }, + { + "epoch": 1.9220689655172414, + "grad_norm": 0.6737676858901978, + "learning_rate": 3.43659629390117e-06, + "loss": 0.6644, + "step": 929 + }, + { + "epoch": 1.9241379310344828, + "grad_norm": 0.707264244556427, + "learning_rate": 3.4251588643925757e-06, + "loss": 0.659, + "step": 930 + }, + { + "epoch": 1.926206896551724, + "grad_norm": 0.687591016292572, + "learning_rate": 3.4137305756186543e-06, + "loss": 0.6175, + "step": 931 + }, + { + "epoch": 1.9282758620689655, + "grad_norm": 0.685309112071991, + 
"learning_rate": 3.4023114939117773e-06, + "loss": 0.6384, + "step": 932 + }, + { + "epoch": 1.930344827586207, + "grad_norm": 0.7224867343902588, + "learning_rate": 3.390901685550887e-06, + "loss": 0.6648, + "step": 933 + }, + { + "epoch": 1.9324137931034482, + "grad_norm": 0.7063190340995789, + "learning_rate": 3.3795012167610898e-06, + "loss": 0.6494, + "step": 934 + }, + { + "epoch": 1.9344827586206896, + "grad_norm": 0.7511458992958069, + "learning_rate": 3.3681101537132914e-06, + "loss": 0.6272, + "step": 935 + }, + { + "epoch": 1.936551724137931, + "grad_norm": 0.6883670091629028, + "learning_rate": 3.356728562523805e-06, + "loss": 0.6276, + "step": 936 + }, + { + "epoch": 1.9386206896551723, + "grad_norm": 0.7098174691200256, + "learning_rate": 3.3453565092539586e-06, + "loss": 0.6675, + "step": 937 + }, + { + "epoch": 1.9406896551724138, + "grad_norm": 0.7062398791313171, + "learning_rate": 3.33399405990973e-06, + "loss": 0.6408, + "step": 938 + }, + { + "epoch": 1.9427586206896552, + "grad_norm": 0.7228072881698608, + "learning_rate": 3.3226412804413444e-06, + "loss": 0.6422, + "step": 939 + }, + { + "epoch": 1.9448275862068964, + "grad_norm": 0.7137162685394287, + "learning_rate": 3.3112982367429105e-06, + "loss": 0.6474, + "step": 940 + }, + { + "epoch": 1.946896551724138, + "grad_norm": 0.6957323551177979, + "learning_rate": 3.299964994652017e-06, + "loss": 0.6492, + "step": 941 + }, + { + "epoch": 1.9489655172413793, + "grad_norm": 0.6673961281776428, + "learning_rate": 3.288641619949374e-06, + "loss": 0.6512, + "step": 942 + }, + { + "epoch": 1.9510344827586206, + "grad_norm": 0.7092306017875671, + "learning_rate": 3.2773281783584104e-06, + "loss": 0.6623, + "step": 943 + }, + { + "epoch": 1.953103448275862, + "grad_norm": 0.7391819953918457, + "learning_rate": 3.2660247355449037e-06, + "loss": 0.6698, + "step": 944 + }, + { + "epoch": 1.9551724137931035, + "grad_norm": 0.7239782214164734, + "learning_rate": 3.254731357116597e-06, + "loss": 0.6404, + "step": 945 + }, + { + "epoch": 1.9572413793103447, + "grad_norm": 0.735502302646637, + "learning_rate": 3.2434481086228163e-06, + "loss": 0.6278, + "step": 946 + }, + { + "epoch": 1.9593103448275864, + "grad_norm": 0.7158029079437256, + "learning_rate": 3.232175055554091e-06, + "loss": 0.6441, + "step": 947 + }, + { + "epoch": 1.9613793103448276, + "grad_norm": 0.7179998755455017, + "learning_rate": 3.2209122633417766e-06, + "loss": 0.6464, + "step": 948 + }, + { + "epoch": 1.9634482758620688, + "grad_norm": 0.6865692734718323, + "learning_rate": 3.209659797357669e-06, + "loss": 0.6373, + "step": 949 + }, + { + "epoch": 1.9655172413793105, + "grad_norm": 0.7228394150733948, + "learning_rate": 3.1984177229136287e-06, + "loss": 0.6583, + "step": 950 + }, + { + "epoch": 1.9675862068965517, + "grad_norm": 0.7167955636978149, + "learning_rate": 3.187186105261205e-06, + "loss": 0.6276, + "step": 951 + }, + { + "epoch": 1.969655172413793, + "grad_norm": 0.6937686204910278, + "learning_rate": 3.1759650095912474e-06, + "loss": 0.6203, + "step": 952 + }, + { + "epoch": 1.9717241379310346, + "grad_norm": 0.6669635772705078, + "learning_rate": 3.16475450103354e-06, + "loss": 0.6447, + "step": 953 + }, + { + "epoch": 1.9737931034482759, + "grad_norm": 0.7243427634239197, + "learning_rate": 3.1535546446564107e-06, + "loss": 0.6375, + "step": 954 + }, + { + "epoch": 1.975862068965517, + "grad_norm": 0.7239643931388855, + "learning_rate": 3.1423655054663665e-06, + "loss": 0.666, + "step": 955 + }, + { + "epoch": 1.9779310344827588, + 
"grad_norm": 0.7242691516876221, + "learning_rate": 3.1311871484077017e-06, + "loss": 0.6427, + "step": 956 + }, + { + "epoch": 1.98, + "grad_norm": 0.7019519209861755, + "learning_rate": 3.1200196383621363e-06, + "loss": 0.637, + "step": 957 + }, + { + "epoch": 1.9820689655172414, + "grad_norm": 0.7041625380516052, + "learning_rate": 3.1088630401484245e-06, + "loss": 0.6297, + "step": 958 + }, + { + "epoch": 1.9841379310344829, + "grad_norm": 0.7110829949378967, + "learning_rate": 3.0977174185219937e-06, + "loss": 0.6353, + "step": 959 + }, + { + "epoch": 1.986206896551724, + "grad_norm": 0.6561233401298523, + "learning_rate": 3.0865828381745515e-06, + "loss": 0.6184, + "step": 960 + }, + { + "epoch": 1.9882758620689656, + "grad_norm": 0.7260603904724121, + "learning_rate": 3.0754593637337276e-06, + "loss": 0.6435, + "step": 961 + }, + { + "epoch": 1.990344827586207, + "grad_norm": 0.7066130638122559, + "learning_rate": 3.0643470597626885e-06, + "loss": 0.6555, + "step": 962 + }, + { + "epoch": 1.9924137931034482, + "grad_norm": 0.6790770888328552, + "learning_rate": 3.05324599075976e-06, + "loss": 0.6216, + "step": 963 + }, + { + "epoch": 1.9944827586206897, + "grad_norm": 0.7200824022293091, + "learning_rate": 3.042156221158064e-06, + "loss": 0.6475, + "step": 964 + }, + { + "epoch": 1.9965517241379311, + "grad_norm": 0.6905136108398438, + "learning_rate": 3.0310778153251325e-06, + "loss": 0.6133, + "step": 965 + }, + { + "epoch": 1.9986206896551724, + "grad_norm": 0.7127056121826172, + "learning_rate": 3.020010837562546e-06, + "loss": 0.667, + "step": 966 + }, + { + "epoch": 2.0006896551724136, + "grad_norm": 0.6826462149620056, + "learning_rate": 3.0089553521055455e-06, + "loss": 0.6117, + "step": 967 + }, + { + "epoch": 2.0027586206896553, + "grad_norm": 0.803715169429779, + "learning_rate": 2.9979114231226776e-06, + "loss": 0.5496, + "step": 968 + }, + { + "epoch": 2.0048275862068965, + "grad_norm": 0.7920373678207397, + "learning_rate": 2.986879114715403e-06, + "loss": 0.5323, + "step": 969 + }, + { + "epoch": 2.0068965517241377, + "grad_norm": 0.7448180913925171, + "learning_rate": 2.9758584909177434e-06, + "loss": 0.5318, + "step": 970 + }, + { + "epoch": 2.0089655172413794, + "grad_norm": 0.7098000049591064, + "learning_rate": 2.964849615695892e-06, + "loss": 0.5341, + "step": 971 + }, + { + "epoch": 2.0110344827586206, + "grad_norm": 0.7263830304145813, + "learning_rate": 2.9538525529478557e-06, + "loss": 0.5605, + "step": 972 + }, + { + "epoch": 2.013103448275862, + "grad_norm": 0.7626384496688843, + "learning_rate": 2.9428673665030772e-06, + "loss": 0.5407, + "step": 973 + }, + { + "epoch": 2.0151724137931035, + "grad_norm": 0.8168633580207825, + "learning_rate": 2.9318941201220664e-06, + "loss": 0.5628, + "step": 974 + }, + { + "epoch": 2.0172413793103448, + "grad_norm": 0.8351486325263977, + "learning_rate": 2.9209328774960327e-06, + "loss": 0.5295, + "step": 975 + }, + { + "epoch": 2.0193103448275864, + "grad_norm": 0.7826777100563049, + "learning_rate": 2.90998370224651e-06, + "loss": 0.5141, + "step": 976 + }, + { + "epoch": 2.0213793103448277, + "grad_norm": 0.7777988314628601, + "learning_rate": 2.899046657924992e-06, + "loss": 0.5577, + "step": 977 + }, + { + "epoch": 2.023448275862069, + "grad_norm": 0.751868724822998, + "learning_rate": 2.8881218080125605e-06, + "loss": 0.5402, + "step": 978 + }, + { + "epoch": 2.0255172413793106, + "grad_norm": 0.6992483735084534, + "learning_rate": 2.8772092159195193e-06, + "loss": 0.5323, + "step": 979 + }, + { + "epoch": 
2.027586206896552, + "grad_norm": 0.7283136248588562, + "learning_rate": 2.8663089449850246e-06, + "loss": 0.5364, + "step": 980 + }, + { + "epoch": 2.029655172413793, + "grad_norm": 0.7618082761764526, + "learning_rate": 2.8554210584767188e-06, + "loss": 0.5388, + "step": 981 + }, + { + "epoch": 2.0317241379310347, + "grad_norm": 0.7302858829498291, + "learning_rate": 2.844545619590361e-06, + "loss": 0.5247, + "step": 982 + }, + { + "epoch": 2.033793103448276, + "grad_norm": 0.706373393535614, + "learning_rate": 2.8336826914494607e-06, + "loss": 0.5138, + "step": 983 + }, + { + "epoch": 2.035862068965517, + "grad_norm": 0.6692239046096802, + "learning_rate": 2.822832337104915e-06, + "loss": 0.5363, + "step": 984 + }, + { + "epoch": 2.037931034482759, + "grad_norm": 0.7576499581336975, + "learning_rate": 2.8119946195346375e-06, + "loss": 0.5389, + "step": 985 + }, + { + "epoch": 2.04, + "grad_norm": 0.7035110592842102, + "learning_rate": 2.8011696016431967e-06, + "loss": 0.5333, + "step": 986 + }, + { + "epoch": 2.0420689655172413, + "grad_norm": 0.7431889176368713, + "learning_rate": 2.790357346261445e-06, + "loss": 0.5228, + "step": 987 + }, + { + "epoch": 2.044137931034483, + "grad_norm": 0.7341644167900085, + "learning_rate": 2.7795579161461685e-06, + "loss": 0.5153, + "step": 988 + }, + { + "epoch": 2.046206896551724, + "grad_norm": 0.7432145476341248, + "learning_rate": 2.7687713739796972e-06, + "loss": 0.5299, + "step": 989 + }, + { + "epoch": 2.0482758620689654, + "grad_norm": 0.6737946271896362, + "learning_rate": 2.7579977823695727e-06, + "loss": 0.5301, + "step": 990 + }, + { + "epoch": 2.050344827586207, + "grad_norm": 0.7434407472610474, + "learning_rate": 2.7472372038481575e-06, + "loss": 0.5442, + "step": 991 + }, + { + "epoch": 2.0524137931034483, + "grad_norm": 0.7228389382362366, + "learning_rate": 2.7364897008722867e-06, + "loss": 0.5392, + "step": 992 + }, + { + "epoch": 2.0544827586206895, + "grad_norm": 0.6981167793273926, + "learning_rate": 2.725755335822903e-06, + "loss": 0.5503, + "step": 993 + }, + { + "epoch": 2.056551724137931, + "grad_norm": 0.6900709867477417, + "learning_rate": 2.7150341710046924e-06, + "loss": 0.5448, + "step": 994 + }, + { + "epoch": 2.0586206896551724, + "grad_norm": 0.6905665993690491, + "learning_rate": 2.704326268645724e-06, + "loss": 0.5304, + "step": 995 + }, + { + "epoch": 2.0606896551724136, + "grad_norm": 0.720930814743042, + "learning_rate": 2.6936316908970874e-06, + "loss": 0.5304, + "step": 996 + }, + { + "epoch": 2.0627586206896553, + "grad_norm": 0.697242796421051, + "learning_rate": 2.6829504998325352e-06, + "loss": 0.5209, + "step": 997 + }, + { + "epoch": 2.0648275862068965, + "grad_norm": 0.6550352573394775, + "learning_rate": 2.67228275744812e-06, + "loss": 0.5319, + "step": 998 + }, + { + "epoch": 2.0668965517241378, + "grad_norm": 0.7028206586837769, + "learning_rate": 2.661628525661835e-06, + "loss": 0.5253, + "step": 999 + }, + { + "epoch": 2.0689655172413794, + "grad_norm": 0.7251700162887573, + "learning_rate": 2.6509878663132514e-06, + "loss": 0.5589, + "step": 1000 + }, + { + "epoch": 2.0710344827586207, + "grad_norm": 0.67062908411026, + "learning_rate": 2.6403608411631744e-06, + "loss": 0.5462, + "step": 1001 + }, + { + "epoch": 2.073103448275862, + "grad_norm": 0.7188374996185303, + "learning_rate": 2.629747511893255e-06, + "loss": 0.5201, + "step": 1002 + }, + { + "epoch": 2.0751724137931036, + "grad_norm": 0.6892343163490295, + "learning_rate": 2.61914794010567e-06, + "loss": 0.5357, + "step": 1003 + }, + { + 
"epoch": 2.077241379310345, + "grad_norm": 0.6836119294166565, + "learning_rate": 2.6085621873227264e-06, + "loss": 0.5123, + "step": 1004 + }, + { + "epoch": 2.079310344827586, + "grad_norm": 0.6911861896514893, + "learning_rate": 2.5979903149865386e-06, + "loss": 0.546, + "step": 1005 + }, + { + "epoch": 2.0813793103448277, + "grad_norm": 0.6947477459907532, + "learning_rate": 2.5874323844586414e-06, + "loss": 0.5408, + "step": 1006 + }, + { + "epoch": 2.083448275862069, + "grad_norm": 0.6671912670135498, + "learning_rate": 2.5768884570196615e-06, + "loss": 0.5256, + "step": 1007 + }, + { + "epoch": 2.08551724137931, + "grad_norm": 0.6696575284004211, + "learning_rate": 2.566358593868936e-06, + "loss": 0.5186, + "step": 1008 + }, + { + "epoch": 2.087586206896552, + "grad_norm": 0.6655759215354919, + "learning_rate": 2.555842856124182e-06, + "loss": 0.5686, + "step": 1009 + }, + { + "epoch": 2.089655172413793, + "grad_norm": 0.685197114944458, + "learning_rate": 2.5453413048211175e-06, + "loss": 0.4912, + "step": 1010 + }, + { + "epoch": 2.0917241379310343, + "grad_norm": 0.7344222664833069, + "learning_rate": 2.5348540009131283e-06, + "loss": 0.5489, + "step": 1011 + }, + { + "epoch": 2.093793103448276, + "grad_norm": 0.7047103047370911, + "learning_rate": 2.5243810052709006e-06, + "loss": 0.5366, + "step": 1012 + }, + { + "epoch": 2.095862068965517, + "grad_norm": 0.6715806722640991, + "learning_rate": 2.513922378682075e-06, + "loss": 0.5337, + "step": 1013 + }, + { + "epoch": 2.0979310344827584, + "grad_norm": 0.6645084619522095, + "learning_rate": 2.5034781818508867e-06, + "loss": 0.5451, + "step": 1014 + }, + { + "epoch": 2.1, + "grad_norm": 0.6680448651313782, + "learning_rate": 2.4930484753978224e-06, + "loss": 0.5141, + "step": 1015 + }, + { + "epoch": 2.1020689655172413, + "grad_norm": 0.6710419058799744, + "learning_rate": 2.482633319859259e-06, + "loss": 0.5233, + "step": 1016 + }, + { + "epoch": 2.1041379310344825, + "grad_norm": 0.6609877943992615, + "learning_rate": 2.472232775687119e-06, + "loss": 0.5343, + "step": 1017 + }, + { + "epoch": 2.106206896551724, + "grad_norm": 0.6360195279121399, + "learning_rate": 2.4618469032485164e-06, + "loss": 0.5549, + "step": 1018 + }, + { + "epoch": 2.1082758620689654, + "grad_norm": 0.6452751159667969, + "learning_rate": 2.4514757628254075e-06, + "loss": 0.5369, + "step": 1019 + }, + { + "epoch": 2.110344827586207, + "grad_norm": 0.6557055115699768, + "learning_rate": 2.441119414614241e-06, + "loss": 0.5277, + "step": 1020 + }, + { + "epoch": 2.1124137931034483, + "grad_norm": 0.7097801566123962, + "learning_rate": 2.4307779187256064e-06, + "loss": 0.5358, + "step": 1021 + }, + { + "epoch": 2.1144827586206896, + "grad_norm": 0.6572842597961426, + "learning_rate": 2.4204513351838897e-06, + "loss": 0.5394, + "step": 1022 + }, + { + "epoch": 2.1165517241379312, + "grad_norm": 0.6733007431030273, + "learning_rate": 2.4101397239269202e-06, + "loss": 0.5579, + "step": 1023 + }, + { + "epoch": 2.1186206896551725, + "grad_norm": 0.6480697393417358, + "learning_rate": 2.3998431448056237e-06, + "loss": 0.5635, + "step": 1024 + }, + { + "epoch": 2.1206896551724137, + "grad_norm": 0.6844390034675598, + "learning_rate": 2.389561657583681e-06, + "loss": 0.5477, + "step": 1025 + }, + { + "epoch": 2.1227586206896554, + "grad_norm": 0.6834397912025452, + "learning_rate": 2.3792953219371713e-06, + "loss": 0.5252, + "step": 1026 + }, + { + "epoch": 2.1248275862068966, + "grad_norm": 0.666690468788147, + "learning_rate": 2.3690441974542322e-06, + "loss": 
0.574, + "step": 1027 + }, + { + "epoch": 2.126896551724138, + "grad_norm": 0.6561058759689331, + "learning_rate": 2.358808343634712e-06, + "loss": 0.5291, + "step": 1028 + }, + { + "epoch": 2.1289655172413795, + "grad_norm": 0.6525347828865051, + "learning_rate": 2.3485878198898253e-06, + "loss": 0.5626, + "step": 1029 + }, + { + "epoch": 2.1310344827586207, + "grad_norm": 0.6866719126701355, + "learning_rate": 2.3383826855418064e-06, + "loss": 0.524, + "step": 1030 + }, + { + "epoch": 2.133103448275862, + "grad_norm": 0.6798598170280457, + "learning_rate": 2.3281929998235664e-06, + "loss": 0.5184, + "step": 1031 + }, + { + "epoch": 2.1351724137931036, + "grad_norm": 0.6799730658531189, + "learning_rate": 2.31801882187835e-06, + "loss": 0.5086, + "step": 1032 + }, + { + "epoch": 2.137241379310345, + "grad_norm": 0.6736025214195251, + "learning_rate": 2.30786021075939e-06, + "loss": 0.5333, + "step": 1033 + }, + { + "epoch": 2.139310344827586, + "grad_norm": 0.6704842448234558, + "learning_rate": 2.297717225429567e-06, + "loss": 0.5065, + "step": 1034 + }, + { + "epoch": 2.1413793103448278, + "grad_norm": 0.6423225998878479, + "learning_rate": 2.287589924761065e-06, + "loss": 0.5292, + "step": 1035 + }, + { + "epoch": 2.143448275862069, + "grad_norm": 0.6814275979995728, + "learning_rate": 2.2774783675350303e-06, + "loss": 0.5418, + "step": 1036 + }, + { + "epoch": 2.14551724137931, + "grad_norm": 0.6510114073753357, + "learning_rate": 2.2673826124412314e-06, + "loss": 0.5175, + "step": 1037 + }, + { + "epoch": 2.147586206896552, + "grad_norm": 0.6611030697822571, + "learning_rate": 2.257302718077721e-06, + "loss": 0.5526, + "step": 1038 + }, + { + "epoch": 2.149655172413793, + "grad_norm": 0.6714975833892822, + "learning_rate": 2.247238742950483e-06, + "loss": 0.524, + "step": 1039 + }, + { + "epoch": 2.1517241379310343, + "grad_norm": 0.6750874519348145, + "learning_rate": 2.2371907454731168e-06, + "loss": 0.5529, + "step": 1040 + }, + { + "epoch": 2.153793103448276, + "grad_norm": 0.6794918179512024, + "learning_rate": 2.2271587839664673e-06, + "loss": 0.5034, + "step": 1041 + }, + { + "epoch": 2.1558620689655172, + "grad_norm": 0.6698225140571594, + "learning_rate": 2.217142916658318e-06, + "loss": 0.531, + "step": 1042 + }, + { + "epoch": 2.1579310344827585, + "grad_norm": 0.6470540761947632, + "learning_rate": 2.2071432016830257e-06, + "loss": 0.5631, + "step": 1043 + }, + { + "epoch": 2.16, + "grad_norm": 0.7159327268600464, + "learning_rate": 2.197159697081208e-06, + "loss": 0.5476, + "step": 1044 + }, + { + "epoch": 2.1620689655172414, + "grad_norm": 0.7387550473213196, + "learning_rate": 2.18719246079938e-06, + "loss": 0.5019, + "step": 1045 + }, + { + "epoch": 2.1641379310344826, + "grad_norm": 0.7351087331771851, + "learning_rate": 2.1772415506896447e-06, + "loss": 0.5607, + "step": 1046 + }, + { + "epoch": 2.1662068965517243, + "grad_norm": 0.6578730940818787, + "learning_rate": 2.1673070245093324e-06, + "loss": 0.5494, + "step": 1047 + }, + { + "epoch": 2.1682758620689655, + "grad_norm": 0.675528347492218, + "learning_rate": 2.157388939920689e-06, + "loss": 0.526, + "step": 1048 + }, + { + "epoch": 2.1703448275862067, + "grad_norm": 0.6719648838043213, + "learning_rate": 2.1474873544905204e-06, + "loss": 0.5334, + "step": 1049 + }, + { + "epoch": 2.1724137931034484, + "grad_norm": 0.6997095346450806, + "learning_rate": 2.137602325689873e-06, + "loss": 0.5274, + "step": 1050 + }, + { + "epoch": 2.1744827586206896, + "grad_norm": 0.7052441835403442, + "learning_rate": 
2.127733910893693e-06, + "loss": 0.5378, + "step": 1051 + }, + { + "epoch": 2.176551724137931, + "grad_norm": 0.6952463388442993, + "learning_rate": 2.1178821673804943e-06, + "loss": 0.5244, + "step": 1052 + }, + { + "epoch": 2.1786206896551725, + "grad_norm": 0.664283037185669, + "learning_rate": 2.108047152332028e-06, + "loss": 0.5335, + "step": 1053 + }, + { + "epoch": 2.1806896551724138, + "grad_norm": 0.6841456294059753, + "learning_rate": 2.0982289228329495e-06, + "loss": 0.5367, + "step": 1054 + }, + { + "epoch": 2.182758620689655, + "grad_norm": 0.6873970031738281, + "learning_rate": 2.0884275358704863e-06, + "loss": 0.5242, + "step": 1055 + }, + { + "epoch": 2.1848275862068967, + "grad_norm": 0.704255998134613, + "learning_rate": 2.078643048334109e-06, + "loss": 0.5369, + "step": 1056 + }, + { + "epoch": 2.186896551724138, + "grad_norm": 0.6614420413970947, + "learning_rate": 2.0688755170152e-06, + "loss": 0.5397, + "step": 1057 + }, + { + "epoch": 2.188965517241379, + "grad_norm": 0.65423583984375, + "learning_rate": 2.0591249986067214e-06, + "loss": 0.5374, + "step": 1058 + }, + { + "epoch": 2.191034482758621, + "grad_norm": 0.6417624950408936, + "learning_rate": 2.0493915497028933e-06, + "loss": 0.5238, + "step": 1059 + }, + { + "epoch": 2.193103448275862, + "grad_norm": 0.6377779841423035, + "learning_rate": 2.039675226798854e-06, + "loss": 0.5377, + "step": 1060 + }, + { + "epoch": 2.1951724137931032, + "grad_norm": 0.6852798461914062, + "learning_rate": 2.029976086290347e-06, + "loss": 0.5386, + "step": 1061 + }, + { + "epoch": 2.197241379310345, + "grad_norm": 0.6332745552062988, + "learning_rate": 2.0202941844733792e-06, + "loss": 0.5541, + "step": 1062 + }, + { + "epoch": 2.199310344827586, + "grad_norm": 0.6756484508514404, + "learning_rate": 2.0106295775439018e-06, + "loss": 0.5092, + "step": 1063 + }, + { + "epoch": 2.2013793103448274, + "grad_norm": 0.6440528035163879, + "learning_rate": 2.0009823215974815e-06, + "loss": 0.5256, + "step": 1064 + }, + { + "epoch": 2.203448275862069, + "grad_norm": 0.6707040071487427, + "learning_rate": 1.9913524726289784e-06, + "loss": 0.5317, + "step": 1065 + }, + { + "epoch": 2.2055172413793103, + "grad_norm": 0.6765915155410767, + "learning_rate": 1.9817400865322166e-06, + "loss": 0.5319, + "step": 1066 + }, + { + "epoch": 2.2075862068965515, + "grad_norm": 0.6627142429351807, + "learning_rate": 1.972145219099662e-06, + "loss": 0.5246, + "step": 1067 + }, + { + "epoch": 2.209655172413793, + "grad_norm": 0.650766909122467, + "learning_rate": 1.962567926022099e-06, + "loss": 0.541, + "step": 1068 + }, + { + "epoch": 2.2117241379310344, + "grad_norm": 0.6753216981887817, + "learning_rate": 1.9530082628883058e-06, + "loss": 0.5472, + "step": 1069 + }, + { + "epoch": 2.213793103448276, + "grad_norm": 0.6764883399009705, + "learning_rate": 1.9434662851847325e-06, + "loss": 0.522, + "step": 1070 + }, + { + "epoch": 2.2158620689655173, + "grad_norm": 0.6282169818878174, + "learning_rate": 1.9339420482951794e-06, + "loss": 0.5127, + "step": 1071 + }, + { + "epoch": 2.2179310344827585, + "grad_norm": 0.6872683763504028, + "learning_rate": 1.9244356075004743e-06, + "loss": 0.5661, + "step": 1072 + }, + { + "epoch": 2.22, + "grad_norm": 0.6766654849052429, + "learning_rate": 1.914947017978153e-06, + "loss": 0.543, + "step": 1073 + }, + { + "epoch": 2.2220689655172414, + "grad_norm": 0.6761453747749329, + "learning_rate": 1.9054763348021372e-06, + "loss": 0.5139, + "step": 1074 + }, + { + "epoch": 2.2241379310344827, + "grad_norm": 
0.6696473360061646, + "learning_rate": 1.896023612942422e-06, + "loss": 0.5224, + "step": 1075 + }, + { + "epoch": 2.2262068965517243, + "grad_norm": 0.7067750096321106, + "learning_rate": 1.8865889072647386e-06, + "loss": 0.5376, + "step": 1076 + }, + { + "epoch": 2.2282758620689656, + "grad_norm": 0.654654324054718, + "learning_rate": 1.8771722725302644e-06, + "loss": 0.5438, + "step": 1077 + }, + { + "epoch": 2.230344827586207, + "grad_norm": 0.6717188358306885, + "learning_rate": 1.8677737633952715e-06, + "loss": 0.5354, + "step": 1078 + }, + { + "epoch": 2.2324137931034485, + "grad_norm": 0.6855020523071289, + "learning_rate": 1.8583934344108446e-06, + "loss": 0.5294, + "step": 1079 + }, + { + "epoch": 2.2344827586206897, + "grad_norm": 0.6623315811157227, + "learning_rate": 1.8490313400225296e-06, + "loss": 0.5317, + "step": 1080 + }, + { + "epoch": 2.236551724137931, + "grad_norm": 0.687870979309082, + "learning_rate": 1.8396875345700498e-06, + "loss": 0.5539, + "step": 1081 + }, + { + "epoch": 2.2386206896551726, + "grad_norm": 0.6914966106414795, + "learning_rate": 1.830362072286963e-06, + "loss": 0.5537, + "step": 1082 + }, + { + "epoch": 2.240689655172414, + "grad_norm": 0.7108622193336487, + "learning_rate": 1.8210550073003701e-06, + "loss": 0.5252, + "step": 1083 + }, + { + "epoch": 2.242758620689655, + "grad_norm": 0.6858078241348267, + "learning_rate": 1.8117663936305785e-06, + "loss": 0.5542, + "step": 1084 + }, + { + "epoch": 2.2448275862068967, + "grad_norm": 0.6860451698303223, + "learning_rate": 1.8024962851908106e-06, + "loss": 0.5246, + "step": 1085 + }, + { + "epoch": 2.246896551724138, + "grad_norm": 0.6590988636016846, + "learning_rate": 1.7932447357868737e-06, + "loss": 0.5328, + "step": 1086 + }, + { + "epoch": 2.248965517241379, + "grad_norm": 0.7076303958892822, + "learning_rate": 1.7840117991168566e-06, + "loss": 0.5205, + "step": 1087 + }, + { + "epoch": 2.251034482758621, + "grad_norm": 0.6729241013526917, + "learning_rate": 1.7747975287708147e-06, + "loss": 0.5322, + "step": 1088 + }, + { + "epoch": 2.253103448275862, + "grad_norm": 0.6530643701553345, + "learning_rate": 1.7656019782304602e-06, + "loss": 0.552, + "step": 1089 + }, + { + "epoch": 2.2551724137931033, + "grad_norm": 0.6714941263198853, + "learning_rate": 1.7564252008688514e-06, + "loss": 0.5344, + "step": 1090 + }, + { + "epoch": 2.257241379310345, + "grad_norm": 0.6543527841567993, + "learning_rate": 1.7472672499500837e-06, + "loss": 0.5282, + "step": 1091 + }, + { + "epoch": 2.259310344827586, + "grad_norm": 0.6821870803833008, + "learning_rate": 1.7381281786289767e-06, + "loss": 0.5009, + "step": 1092 + }, + { + "epoch": 2.2613793103448274, + "grad_norm": 0.6596633791923523, + "learning_rate": 1.729008039950772e-06, + "loss": 0.546, + "step": 1093 + }, + { + "epoch": 2.263448275862069, + "grad_norm": 0.6522555351257324, + "learning_rate": 1.71990688685082e-06, + "loss": 0.5464, + "step": 1094 + }, + { + "epoch": 2.2655172413793103, + "grad_norm": 0.668368399143219, + "learning_rate": 1.7108247721542725e-06, + "loss": 0.528, + "step": 1095 + }, + { + "epoch": 2.2675862068965515, + "grad_norm": 0.697405219078064, + "learning_rate": 1.701761748575786e-06, + "loss": 0.5657, + "step": 1096 + }, + { + "epoch": 2.269655172413793, + "grad_norm": 0.6927202939987183, + "learning_rate": 1.6927178687191953e-06, + "loss": 0.518, + "step": 1097 + }, + { + "epoch": 2.2717241379310344, + "grad_norm": 0.658198356628418, + "learning_rate": 1.6836931850772331e-06, + "loss": 0.5482, + "step": 1098 + }, + { + 
"epoch": 2.2737931034482757, + "grad_norm": 0.688456654548645, + "learning_rate": 1.6746877500312054e-06, + "loss": 0.5114, + "step": 1099 + }, + { + "epoch": 2.2758620689655173, + "grad_norm": 0.6654470562934875, + "learning_rate": 1.6657016158506966e-06, + "loss": 0.5417, + "step": 1100 + }, + { + "epoch": 2.2779310344827586, + "grad_norm": 0.6720593571662903, + "learning_rate": 1.656734834693266e-06, + "loss": 0.5243, + "step": 1101 + }, + { + "epoch": 2.2800000000000002, + "grad_norm": 0.6629576086997986, + "learning_rate": 1.6477874586041415e-06, + "loss": 0.5244, + "step": 1102 + }, + { + "epoch": 2.2820689655172415, + "grad_norm": 0.6911601424217224, + "learning_rate": 1.6388595395159207e-06, + "loss": 0.5541, + "step": 1103 + }, + { + "epoch": 2.2841379310344827, + "grad_norm": 0.6672130823135376, + "learning_rate": 1.6299511292482689e-06, + "loss": 0.5315, + "step": 1104 + }, + { + "epoch": 2.2862068965517244, + "grad_norm": 0.688179612159729, + "learning_rate": 1.621062279507617e-06, + "loss": 0.526, + "step": 1105 + }, + { + "epoch": 2.2882758620689656, + "grad_norm": 0.6778724193572998, + "learning_rate": 1.612193041886862e-06, + "loss": 0.5029, + "step": 1106 + }, + { + "epoch": 2.290344827586207, + "grad_norm": 0.6525766253471375, + "learning_rate": 1.6033434678650684e-06, + "loss": 0.5367, + "step": 1107 + }, + { + "epoch": 2.2924137931034485, + "grad_norm": 0.6853109002113342, + "learning_rate": 1.5945136088071677e-06, + "loss": 0.5411, + "step": 1108 + }, + { + "epoch": 2.2944827586206897, + "grad_norm": 0.6768192648887634, + "learning_rate": 1.5857035159636625e-06, + "loss": 0.5474, + "step": 1109 + }, + { + "epoch": 2.296551724137931, + "grad_norm": 0.6869329214096069, + "learning_rate": 1.5769132404703274e-06, + "loss": 0.5156, + "step": 1110 + }, + { + "epoch": 2.2986206896551726, + "grad_norm": 0.641927182674408, + "learning_rate": 1.5681428333479104e-06, + "loss": 0.5612, + "step": 1111 + }, + { + "epoch": 2.300689655172414, + "grad_norm": 0.6849982738494873, + "learning_rate": 1.5593923455018462e-06, + "loss": 0.524, + "step": 1112 + }, + { + "epoch": 2.302758620689655, + "grad_norm": 0.6639160513877869, + "learning_rate": 1.550661827721941e-06, + "loss": 0.5219, + "step": 1113 + }, + { + "epoch": 2.3048275862068968, + "grad_norm": 0.6571563482284546, + "learning_rate": 1.5419513306821055e-06, + "loss": 0.5213, + "step": 1114 + }, + { + "epoch": 2.306896551724138, + "grad_norm": 0.6656675338745117, + "learning_rate": 1.5332609049400298e-06, + "loss": 0.5142, + "step": 1115 + }, + { + "epoch": 2.308965517241379, + "grad_norm": 0.6509329080581665, + "learning_rate": 1.5245906009369211e-06, + "loss": 0.5379, + "step": 1116 + }, + { + "epoch": 2.311034482758621, + "grad_norm": 0.6804161667823792, + "learning_rate": 1.5159404689971797e-06, + "loss": 0.5348, + "step": 1117 + }, + { + "epoch": 2.313103448275862, + "grad_norm": 0.6578646898269653, + "learning_rate": 1.5073105593281385e-06, + "loss": 0.5218, + "step": 1118 + }, + { + "epoch": 2.3151724137931033, + "grad_norm": 0.6653922200202942, + "learning_rate": 1.4987009220197406e-06, + "loss": 0.5304, + "step": 1119 + }, + { + "epoch": 2.317241379310345, + "grad_norm": 0.6492714881896973, + "learning_rate": 1.4901116070442772e-06, + "loss": 0.5218, + "step": 1120 + }, + { + "epoch": 2.3193103448275862, + "grad_norm": 0.7024757862091064, + "learning_rate": 1.4815426642560753e-06, + "loss": 0.545, + "step": 1121 + }, + { + "epoch": 2.3213793103448275, + "grad_norm": 0.6717721223831177, + "learning_rate": 
1.47299414339122e-06, + "loss": 0.5376, + "step": 1122 + }, + { + "epoch": 2.323448275862069, + "grad_norm": 0.6341143250465393, + "learning_rate": 1.4644660940672628e-06, + "loss": 0.5158, + "step": 1123 + }, + { + "epoch": 2.3255172413793104, + "grad_norm": 0.6935365200042725, + "learning_rate": 1.455958565782934e-06, + "loss": 0.537, + "step": 1124 + }, + { + "epoch": 2.3275862068965516, + "grad_norm": 0.6960786581039429, + "learning_rate": 1.4474716079178541e-06, + "loss": 0.5547, + "step": 1125 + }, + { + "epoch": 2.3296551724137933, + "grad_norm": 0.6549028754234314, + "learning_rate": 1.4390052697322499e-06, + "loss": 0.544, + "step": 1126 + }, + { + "epoch": 2.3317241379310345, + "grad_norm": 0.6705878973007202, + "learning_rate": 1.430559600366665e-06, + "loss": 0.5502, + "step": 1127 + }, + { + "epoch": 2.3337931034482757, + "grad_norm": 0.6721364855766296, + "learning_rate": 1.4221346488416765e-06, + "loss": 0.5532, + "step": 1128 + }, + { + "epoch": 2.3358620689655174, + "grad_norm": 0.6675882935523987, + "learning_rate": 1.4137304640576161e-06, + "loss": 0.5174, + "step": 1129 + }, + { + "epoch": 2.3379310344827586, + "grad_norm": 0.650718629360199, + "learning_rate": 1.4053470947942694e-06, + "loss": 0.5565, + "step": 1130 + }, + { + "epoch": 2.34, + "grad_norm": 0.6801978945732117, + "learning_rate": 1.3969845897106166e-06, + "loss": 0.5276, + "step": 1131 + }, + { + "epoch": 2.3420689655172415, + "grad_norm": 0.6504865288734436, + "learning_rate": 1.3886429973445253e-06, + "loss": 0.5126, + "step": 1132 + }, + { + "epoch": 2.3441379310344828, + "grad_norm": 0.6334866881370544, + "learning_rate": 1.3803223661124938e-06, + "loss": 0.5036, + "step": 1133 + }, + { + "epoch": 2.346206896551724, + "grad_norm": 0.6434235572814941, + "learning_rate": 1.3720227443093437e-06, + "loss": 0.5119, + "step": 1134 + }, + { + "epoch": 2.3482758620689657, + "grad_norm": 0.6684521436691284, + "learning_rate": 1.3637441801079666e-06, + "loss": 0.526, + "step": 1135 + }, + { + "epoch": 2.350344827586207, + "grad_norm": 0.6425202488899231, + "learning_rate": 1.3554867215590174e-06, + "loss": 0.5348, + "step": 1136 + }, + { + "epoch": 2.352413793103448, + "grad_norm": 0.6454555988311768, + "learning_rate": 1.3472504165906614e-06, + "loss": 0.5457, + "step": 1137 + }, + { + "epoch": 2.35448275862069, + "grad_norm": 0.6798628568649292, + "learning_rate": 1.3390353130082756e-06, + "loss": 0.5436, + "step": 1138 + }, + { + "epoch": 2.356551724137931, + "grad_norm": 0.6718437671661377, + "learning_rate": 1.330841458494183e-06, + "loss": 0.5232, + "step": 1139 + }, + { + "epoch": 2.3586206896551722, + "grad_norm": 0.6361545324325562, + "learning_rate": 1.3226689006073712e-06, + "loss": 0.5313, + "step": 1140 + }, + { + "epoch": 2.360689655172414, + "grad_norm": 0.6867586374282837, + "learning_rate": 1.3145176867832165e-06, + "loss": 0.5452, + "step": 1141 + }, + { + "epoch": 2.362758620689655, + "grad_norm": 0.665239691734314, + "learning_rate": 1.3063878643332112e-06, + "loss": 0.5136, + "step": 1142 + }, + { + "epoch": 2.3648275862068964, + "grad_norm": 0.6792328953742981, + "learning_rate": 1.2982794804446858e-06, + "loss": 0.5325, + "step": 1143 + }, + { + "epoch": 2.366896551724138, + "grad_norm": 0.6773557066917419, + "learning_rate": 1.2901925821805372e-06, + "loss": 0.5112, + "step": 1144 + }, + { + "epoch": 2.3689655172413793, + "grad_norm": 0.6913496851921082, + "learning_rate": 1.2821272164789544e-06, + "loss": 0.5547, + "step": 1145 + }, + { + "epoch": 2.3710344827586205, + "grad_norm": 
0.6805727481842041, + "learning_rate": 1.2740834301531468e-06, + "loss": 0.5473, + "step": 1146 + }, + { + "epoch": 2.373103448275862, + "grad_norm": 0.7125365734100342, + "learning_rate": 1.266061269891073e-06, + "loss": 0.5248, + "step": 1147 + }, + { + "epoch": 2.3751724137931034, + "grad_norm": 0.6634067893028259, + "learning_rate": 1.2580607822551677e-06, + "loss": 0.5287, + "step": 1148 + }, + { + "epoch": 2.3772413793103446, + "grad_norm": 0.6796189546585083, + "learning_rate": 1.2500820136820735e-06, + "loss": 0.532, + "step": 1149 + }, + { + "epoch": 2.3793103448275863, + "grad_norm": 0.6692973971366882, + "learning_rate": 1.2421250104823683e-06, + "loss": 0.5265, + "step": 1150 + }, + { + "epoch": 2.3813793103448275, + "grad_norm": 0.6606705784797668, + "learning_rate": 1.2341898188403068e-06, + "loss": 0.5184, + "step": 1151 + }, + { + "epoch": 2.3834482758620688, + "grad_norm": 0.6459177136421204, + "learning_rate": 1.2262764848135316e-06, + "loss": 0.5578, + "step": 1152 + }, + { + "epoch": 2.3855172413793104, + "grad_norm": 0.67464679479599, + "learning_rate": 1.2183850543328313e-06, + "loss": 0.5367, + "step": 1153 + }, + { + "epoch": 2.3875862068965517, + "grad_norm": 0.6794815063476562, + "learning_rate": 1.2105155732018532e-06, + "loss": 0.5458, + "step": 1154 + }, + { + "epoch": 2.389655172413793, + "grad_norm": 0.6938785910606384, + "learning_rate": 1.2026680870968494e-06, + "loss": 0.5217, + "step": 1155 + }, + { + "epoch": 2.3917241379310346, + "grad_norm": 0.6906960010528564, + "learning_rate": 1.194842641566406e-06, + "loss": 0.5315, + "step": 1156 + }, + { + "epoch": 2.393793103448276, + "grad_norm": 0.6642972826957703, + "learning_rate": 1.187039282031182e-06, + "loss": 0.5315, + "step": 1157 + }, + { + "epoch": 2.395862068965517, + "grad_norm": 0.6678175926208496, + "learning_rate": 1.179258053783644e-06, + "loss": 0.5007, + "step": 1158 + }, + { + "epoch": 2.3979310344827587, + "grad_norm": 0.6204706430435181, + "learning_rate": 1.171499001987802e-06, + "loss": 0.5617, + "step": 1159 + }, + { + "epoch": 2.4, + "grad_norm": 0.6954528093338013, + "learning_rate": 1.163762171678951e-06, + "loss": 0.5211, + "step": 1160 + }, + { + "epoch": 2.402068965517241, + "grad_norm": 0.6939405798912048, + "learning_rate": 1.156047607763407e-06, + "loss": 0.5264, + "step": 1161 + }, + { + "epoch": 2.404137931034483, + "grad_norm": 0.7006513476371765, + "learning_rate": 1.1483553550182453e-06, + "loss": 0.5488, + "step": 1162 + }, + { + "epoch": 2.406206896551724, + "grad_norm": 0.67670738697052, + "learning_rate": 1.1406854580910426e-06, + "loss": 0.5304, + "step": 1163 + }, + { + "epoch": 2.4082758620689657, + "grad_norm": 0.681532084941864, + "learning_rate": 1.1330379614996223e-06, + "loss": 0.5307, + "step": 1164 + }, + { + "epoch": 2.410344827586207, + "grad_norm": 0.671879768371582, + "learning_rate": 1.1254129096317807e-06, + "loss": 0.5423, + "step": 1165 + }, + { + "epoch": 2.412413793103448, + "grad_norm": 0.6575496196746826, + "learning_rate": 1.1178103467450512e-06, + "loss": 0.5261, + "step": 1166 + }, + { + "epoch": 2.41448275862069, + "grad_norm": 0.692674458026886, + "learning_rate": 1.110230316966427e-06, + "loss": 0.522, + "step": 1167 + }, + { + "epoch": 2.416551724137931, + "grad_norm": 0.6568799018859863, + "learning_rate": 1.102672864292122e-06, + "loss": 0.5187, + "step": 1168 + }, + { + "epoch": 2.4186206896551723, + "grad_norm": 0.6846016049385071, + "learning_rate": 1.095138032587298e-06, + "loss": 0.5518, + "step": 1169 + }, + { + "epoch": 
2.420689655172414, + "grad_norm": 0.6592535376548767, + "learning_rate": 1.0876258655858307e-06, + "loss": 0.5241, + "step": 1170 + }, + { + "epoch": 2.422758620689655, + "grad_norm": 0.6726316809654236, + "learning_rate": 1.0801364068900334e-06, + "loss": 0.5153, + "step": 1171 + }, + { + "epoch": 2.4248275862068964, + "grad_norm": 0.661979615688324, + "learning_rate": 1.0726696999704266e-06, + "loss": 0.5321, + "step": 1172 + }, + { + "epoch": 2.426896551724138, + "grad_norm": 0.6790592074394226, + "learning_rate": 1.0652257881654625e-06, + "loss": 0.5361, + "step": 1173 + }, + { + "epoch": 2.4289655172413793, + "grad_norm": 0.6957859396934509, + "learning_rate": 1.057804714681296e-06, + "loss": 0.5129, + "step": 1174 + }, + { + "epoch": 2.4310344827586206, + "grad_norm": 0.6732125282287598, + "learning_rate": 1.0504065225915166e-06, + "loss": 0.56, + "step": 1175 + }, + { + "epoch": 2.4331034482758622, + "grad_norm": 0.6757326126098633, + "learning_rate": 1.0430312548369076e-06, + "loss": 0.5469, + "step": 1176 + }, + { + "epoch": 2.4351724137931035, + "grad_norm": 0.673953115940094, + "learning_rate": 1.0356789542251939e-06, + "loss": 0.507, + "step": 1177 + }, + { + "epoch": 2.4372413793103447, + "grad_norm": 0.6575935482978821, + "learning_rate": 1.028349663430792e-06, + "loss": 0.5134, + "step": 1178 + }, + { + "epoch": 2.4393103448275864, + "grad_norm": 0.673143208026886, + "learning_rate": 1.0210434249945677e-06, + "loss": 0.5443, + "step": 1179 + }, + { + "epoch": 2.4413793103448276, + "grad_norm": 0.6751227974891663, + "learning_rate": 1.0137602813235824e-06, + "loss": 0.5535, + "step": 1180 + }, + { + "epoch": 2.443448275862069, + "grad_norm": 0.6402212977409363, + "learning_rate": 1.0065002746908532e-06, + "loss": 0.5517, + "step": 1181 + }, + { + "epoch": 2.4455172413793105, + "grad_norm": 0.6959084868431091, + "learning_rate": 9.992634472351016e-07, + "loss": 0.5104, + "step": 1182 + }, + { + "epoch": 2.4475862068965517, + "grad_norm": 0.673514187335968, + "learning_rate": 9.92049840960514e-07, + "loss": 0.5485, + "step": 1183 + }, + { + "epoch": 2.449655172413793, + "grad_norm": 0.64295893907547, + "learning_rate": 9.84859497736495e-07, + "loss": 0.5202, + "step": 1184 + }, + { + "epoch": 2.4517241379310346, + "grad_norm": 0.669701337814331, + "learning_rate": 9.776924592974257e-07, + "loss": 0.5201, + "step": 1185 + }, + { + "epoch": 2.453793103448276, + "grad_norm": 0.6700237393379211, + "learning_rate": 9.705487672424202e-07, + "loss": 0.5437, + "step": 1186 + }, + { + "epoch": 2.455862068965517, + "grad_norm": 0.6907758712768555, + "learning_rate": 9.63428463035085e-07, + "loss": 0.511, + "step": 1187 + }, + { + "epoch": 2.4579310344827587, + "grad_norm": 0.6732699275016785, + "learning_rate": 9.563315880032798e-07, + "loss": 0.5418, + "step": 1188 + }, + { + "epoch": 2.46, + "grad_norm": 0.7099267840385437, + "learning_rate": 9.492581833388736e-07, + "loss": 0.5403, + "step": 1189 + }, + { + "epoch": 2.462068965517241, + "grad_norm": 0.6354034543037415, + "learning_rate": 9.422082900975105e-07, + "loss": 0.5227, + "step": 1190 + }, + { + "epoch": 2.464137931034483, + "grad_norm": 0.6575520038604736, + "learning_rate": 9.351819491983671e-07, + "loss": 0.5327, + "step": 1191 + }, + { + "epoch": 2.466206896551724, + "grad_norm": 0.6583318114280701, + "learning_rate": 9.281792014239171e-07, + "loss": 0.5292, + "step": 1192 + }, + { + "epoch": 2.4682758620689658, + "grad_norm": 0.6778759360313416, + "learning_rate": 9.212000874196953e-07, + "loss": 0.5091, + "step": 1193 + 
}, + { + "epoch": 2.470344827586207, + "grad_norm": 0.6768158674240112, + "learning_rate": 9.142446476940598e-07, + "loss": 0.5043, + "step": 1194 + }, + { + "epoch": 2.472413793103448, + "grad_norm": 0.6922417879104614, + "learning_rate": 9.073129226179589e-07, + "loss": 0.5382, + "step": 1195 + }, + { + "epoch": 2.47448275862069, + "grad_norm": 0.6937352418899536, + "learning_rate": 9.004049524246944e-07, + "loss": 0.5334, + "step": 1196 + }, + { + "epoch": 2.476551724137931, + "grad_norm": 0.6940251588821411, + "learning_rate": 8.935207772096904e-07, + "loss": 0.5235, + "step": 1197 + }, + { + "epoch": 2.4786206896551723, + "grad_norm": 0.6610797643661499, + "learning_rate": 8.86660436930259e-07, + "loss": 0.5369, + "step": 1198 + }, + { + "epoch": 2.480689655172414, + "grad_norm": 0.6743729114532471, + "learning_rate": 8.798239714053692e-07, + "loss": 0.5494, + "step": 1199 + }, + { + "epoch": 2.4827586206896552, + "grad_norm": 0.6906112432479858, + "learning_rate": 8.730114203154144e-07, + "loss": 0.5221, + "step": 1200 + }, + { + "epoch": 2.4848275862068965, + "grad_norm": 0.6822516322135925, + "learning_rate": 8.662228232019876e-07, + "loss": 0.5364, + "step": 1201 + }, + { + "epoch": 2.486896551724138, + "grad_norm": 0.6591139435768127, + "learning_rate": 8.594582194676393e-07, + "loss": 0.5412, + "step": 1202 + }, + { + "epoch": 2.4889655172413794, + "grad_norm": 0.694092333316803, + "learning_rate": 8.527176483756671e-07, + "loss": 0.5233, + "step": 1203 + }, + { + "epoch": 2.4910344827586206, + "grad_norm": 0.6754273772239685, + "learning_rate": 8.460011490498671e-07, + "loss": 0.5501, + "step": 1204 + }, + { + "epoch": 2.4931034482758623, + "grad_norm": 0.670056164264679, + "learning_rate": 8.393087604743283e-07, + "loss": 0.528, + "step": 1205 + }, + { + "epoch": 2.4951724137931035, + "grad_norm": 0.6645668148994446, + "learning_rate": 8.326405214931838e-07, + "loss": 0.5586, + "step": 1206 + }, + { + "epoch": 2.4972413793103447, + "grad_norm": 0.6548720002174377, + "learning_rate": 8.259964708104091e-07, + "loss": 0.5316, + "step": 1207 + }, + { + "epoch": 2.4993103448275864, + "grad_norm": 0.6840012073516846, + "learning_rate": 8.193766469895737e-07, + "loss": 0.5274, + "step": 1208 + }, + { + "epoch": 2.5013793103448276, + "grad_norm": 0.655719518661499, + "learning_rate": 8.127810884536402e-07, + "loss": 0.521, + "step": 1209 + }, + { + "epoch": 2.503448275862069, + "grad_norm": 0.6610667109489441, + "learning_rate": 8.062098334847185e-07, + "loss": 0.5472, + "step": 1210 + }, + { + "epoch": 2.5055172413793105, + "grad_norm": 0.6627329587936401, + "learning_rate": 7.996629202238654e-07, + "loss": 0.5101, + "step": 1211 + }, + { + "epoch": 2.5075862068965518, + "grad_norm": 0.6512289047241211, + "learning_rate": 7.931403866708464e-07, + "loss": 0.5263, + "step": 1212 + }, + { + "epoch": 2.509655172413793, + "grad_norm": 0.6700037121772766, + "learning_rate": 7.866422706839239e-07, + "loss": 0.5265, + "step": 1213 + }, + { + "epoch": 2.5117241379310347, + "grad_norm": 0.649949312210083, + "learning_rate": 7.801686099796358e-07, + "loss": 0.5439, + "step": 1214 + }, + { + "epoch": 2.513793103448276, + "grad_norm": 0.6628941893577576, + "learning_rate": 7.737194421325739e-07, + "loss": 0.5112, + "step": 1215 + }, + { + "epoch": 2.515862068965517, + "grad_norm": 0.6837835311889648, + "learning_rate": 7.672948045751699e-07, + "loss": 0.5497, + "step": 1216 + }, + { + "epoch": 2.517931034482759, + "grad_norm": 0.6798170804977417, + "learning_rate": 7.60894734597476e-07, + "loss": 
0.5439, + "step": 1217 + }, + { + "epoch": 2.52, + "grad_norm": 0.6628232002258301, + "learning_rate": 7.545192693469467e-07, + "loss": 0.5435, + "step": 1218 + }, + { + "epoch": 2.5220689655172412, + "grad_norm": 0.6693230867385864, + "learning_rate": 7.481684458282273e-07, + "loss": 0.5377, + "step": 1219 + }, + { + "epoch": 2.524137931034483, + "grad_norm": 0.6691813468933105, + "learning_rate": 7.418423009029363e-07, + "loss": 0.5463, + "step": 1220 + }, + { + "epoch": 2.526206896551724, + "grad_norm": 0.6763612031936646, + "learning_rate": 7.355408712894508e-07, + "loss": 0.5616, + "step": 1221 + }, + { + "epoch": 2.5282758620689654, + "grad_norm": 0.6607294082641602, + "learning_rate": 7.292641935626966e-07, + "loss": 0.5381, + "step": 1222 + }, + { + "epoch": 2.530344827586207, + "grad_norm": 0.6794742941856384, + "learning_rate": 7.23012304153931e-07, + "loss": 0.5427, + "step": 1223 + }, + { + "epoch": 2.5324137931034483, + "grad_norm": 0.6626706719398499, + "learning_rate": 7.167852393505393e-07, + "loss": 0.534, + "step": 1224 + }, + { + "epoch": 2.5344827586206895, + "grad_norm": 0.6796131730079651, + "learning_rate": 7.105830352958143e-07, + "loss": 0.5493, + "step": 1225 + }, + { + "epoch": 2.536551724137931, + "grad_norm": 0.6909650564193726, + "learning_rate": 7.04405727988754e-07, + "loss": 0.5359, + "step": 1226 + }, + { + "epoch": 2.5386206896551724, + "grad_norm": 0.6646261811256409, + "learning_rate": 6.982533532838481e-07, + "loss": 0.4803, + "step": 1227 + }, + { + "epoch": 2.5406896551724136, + "grad_norm": 0.6714354753494263, + "learning_rate": 6.921259468908742e-07, + "loss": 0.5642, + "step": 1228 + }, + { + "epoch": 2.5427586206896553, + "grad_norm": 0.6736488342285156, + "learning_rate": 6.86023544374686e-07, + "loss": 0.5513, + "step": 1229 + }, + { + "epoch": 2.5448275862068965, + "grad_norm": 0.7024829387664795, + "learning_rate": 6.799461811550101e-07, + "loss": 0.5382, + "step": 1230 + }, + { + "epoch": 2.5468965517241378, + "grad_norm": 0.670295000076294, + "learning_rate": 6.738938925062399e-07, + "loss": 0.5301, + "step": 1231 + }, + { + "epoch": 2.5489655172413794, + "grad_norm": 0.6387397050857544, + "learning_rate": 6.678667135572293e-07, + "loss": 0.5093, + "step": 1232 + }, + { + "epoch": 2.5510344827586207, + "grad_norm": 0.6766871809959412, + "learning_rate": 6.618646792910893e-07, + "loss": 0.5519, + "step": 1233 + }, + { + "epoch": 2.553103448275862, + "grad_norm": 0.7046573758125305, + "learning_rate": 6.558878245449879e-07, + "loss": 0.5218, + "step": 1234 + }, + { + "epoch": 2.5551724137931036, + "grad_norm": 0.6664929389953613, + "learning_rate": 6.499361840099421e-07, + "loss": 0.522, + "step": 1235 + }, + { + "epoch": 2.557241379310345, + "grad_norm": 0.6669551730155945, + "learning_rate": 6.440097922306232e-07, + "loss": 0.565, + "step": 1236 + }, + { + "epoch": 2.559310344827586, + "grad_norm": 0.6854833364486694, + "learning_rate": 6.381086836051498e-07, + "loss": 0.5561, + "step": 1237 + }, + { + "epoch": 2.5613793103448277, + "grad_norm": 0.6877058148384094, + "learning_rate": 6.322328923848958e-07, + "loss": 0.5313, + "step": 1238 + }, + { + "epoch": 2.563448275862069, + "grad_norm": 0.6529080867767334, + "learning_rate": 6.263824526742807e-07, + "loss": 0.5318, + "step": 1239 + }, + { + "epoch": 2.56551724137931, + "grad_norm": 0.6818994879722595, + "learning_rate": 6.205573984305857e-07, + "loss": 0.5387, + "step": 1240 + }, + { + "epoch": 2.567586206896552, + "grad_norm": 0.6354886889457703, + "learning_rate": 
6.147577634637413e-07, + "loss": 0.5351, + "step": 1241 + }, + { + "epoch": 2.569655172413793, + "grad_norm": 0.6590231657028198, + "learning_rate": 6.089835814361461e-07, + "loss": 0.5288, + "step": 1242 + }, + { + "epoch": 2.5717241379310343, + "grad_norm": 0.6300837993621826, + "learning_rate": 6.03234885862457e-07, + "loss": 0.554, + "step": 1243 + }, + { + "epoch": 2.573793103448276, + "grad_norm": 0.64389568567276, + "learning_rate": 5.975117101094091e-07, + "loss": 0.5548, + "step": 1244 + }, + { + "epoch": 2.575862068965517, + "grad_norm": 0.65228271484375, + "learning_rate": 5.918140873956063e-07, + "loss": 0.5418, + "step": 1245 + }, + { + "epoch": 2.5779310344827584, + "grad_norm": 0.6716015934944153, + "learning_rate": 5.861420507913462e-07, + "loss": 0.5506, + "step": 1246 + }, + { + "epoch": 2.58, + "grad_norm": 0.6640960574150085, + "learning_rate": 5.804956332184092e-07, + "loss": 0.5215, + "step": 1247 + }, + { + "epoch": 2.5820689655172413, + "grad_norm": 0.6824872493743896, + "learning_rate": 5.748748674498855e-07, + "loss": 0.546, + "step": 1248 + }, + { + "epoch": 2.5841379310344825, + "grad_norm": 0.6688941121101379, + "learning_rate": 5.692797861099719e-07, + "loss": 0.5166, + "step": 1249 + }, + { + "epoch": 2.586206896551724, + "grad_norm": 0.6445797085762024, + "learning_rate": 5.637104216737871e-07, + "loss": 0.5286, + "step": 1250 + }, + { + "epoch": 2.5882758620689654, + "grad_norm": 0.6584656238555908, + "learning_rate": 5.581668064671847e-07, + "loss": 0.5364, + "step": 1251 + }, + { + "epoch": 2.5903448275862067, + "grad_norm": 0.6681441068649292, + "learning_rate": 5.526489726665624e-07, + "loss": 0.5203, + "step": 1252 + }, + { + "epoch": 2.5924137931034483, + "grad_norm": 0.6537905931472778, + "learning_rate": 5.471569522986775e-07, + "loss": 0.5203, + "step": 1253 + }, + { + "epoch": 2.5944827586206896, + "grad_norm": 0.613910973072052, + "learning_rate": 5.416907772404606e-07, + "loss": 0.5105, + "step": 1254 + }, + { + "epoch": 2.596551724137931, + "grad_norm": 0.6802027225494385, + "learning_rate": 5.3625047921883e-07, + "loss": 0.5231, + "step": 1255 + }, + { + "epoch": 2.5986206896551725, + "grad_norm": 0.6352335214614868, + "learning_rate": 5.30836089810507e-07, + "loss": 0.5125, + "step": 1256 + }, + { + "epoch": 2.6006896551724137, + "grad_norm": 0.6671028137207031, + "learning_rate": 5.254476404418341e-07, + "loss": 0.5221, + "step": 1257 + }, + { + "epoch": 2.602758620689655, + "grad_norm": 0.6607826948165894, + "learning_rate": 5.200851623885922e-07, + "loss": 0.5384, + "step": 1258 + }, + { + "epoch": 2.6048275862068966, + "grad_norm": 0.645964503288269, + "learning_rate": 5.147486867758201e-07, + "loss": 0.5306, + "step": 1259 + }, + { + "epoch": 2.606896551724138, + "grad_norm": 0.6596983671188354, + "learning_rate": 5.094382445776281e-07, + "loss": 0.5464, + "step": 1260 + }, + { + "epoch": 2.608965517241379, + "grad_norm": 0.678046464920044, + "learning_rate": 5.041538666170282e-07, + "loss": 0.524, + "step": 1261 + }, + { + "epoch": 2.6110344827586207, + "grad_norm": 0.6556622385978699, + "learning_rate": 4.988955835657456e-07, + "loss": 0.5395, + "step": 1262 + }, + { + "epoch": 2.613103448275862, + "grad_norm": 0.6770464181900024, + "learning_rate": 4.93663425944047e-07, + "loss": 0.5394, + "step": 1263 + }, + { + "epoch": 2.6151724137931036, + "grad_norm": 0.6753197312355042, + "learning_rate": 4.884574241205603e-07, + "loss": 0.5239, + "step": 1264 + }, + { + "epoch": 2.617241379310345, + "grad_norm": 0.655596911907196, + 
"learning_rate": 4.832776083120983e-07, + "loss": 0.5148, + "step": 1265 + }, + { + "epoch": 2.619310344827586, + "grad_norm": 0.6836827397346497, + "learning_rate": 4.781240085834865e-07, + "loss": 0.5453, + "step": 1266 + }, + { + "epoch": 2.6213793103448277, + "grad_norm": 0.680099368095398, + "learning_rate": 4.729966548473841e-07, + "loss": 0.5148, + "step": 1267 + }, + { + "epoch": 2.623448275862069, + "grad_norm": 0.6692262887954712, + "learning_rate": 4.678955768641147e-07, + "loss": 0.554, + "step": 1268 + }, + { + "epoch": 2.62551724137931, + "grad_norm": 0.6534875631332397, + "learning_rate": 4.628208042414889e-07, + "loss": 0.5548, + "step": 1269 + }, + { + "epoch": 2.627586206896552, + "grad_norm": 0.617273211479187, + "learning_rate": 4.5777236643463787e-07, + "loss": 0.5305, + "step": 1270 + }, + { + "epoch": 2.629655172413793, + "grad_norm": 0.6691027283668518, + "learning_rate": 4.5275029274583715e-07, + "loss": 0.5292, + "step": 1271 + }, + { + "epoch": 2.6317241379310343, + "grad_norm": 0.6685481071472168, + "learning_rate": 4.477546123243409e-07, + "loss": 0.5306, + "step": 1272 + }, + { + "epoch": 2.633793103448276, + "grad_norm": 0.6642303466796875, + "learning_rate": 4.4278535416620914e-07, + "loss": 0.5276, + "step": 1273 + }, + { + "epoch": 2.6358620689655172, + "grad_norm": 0.6658126711845398, + "learning_rate": 4.3784254711414064e-07, + "loss": 0.5447, + "step": 1274 + }, + { + "epoch": 2.637931034482759, + "grad_norm": 0.6844912767410278, + "learning_rate": 4.329262198573109e-07, + "loss": 0.5419, + "step": 1275 + }, + { + "epoch": 2.64, + "grad_norm": 0.6531012654304504, + "learning_rate": 4.280364009311927e-07, + "loss": 0.5283, + "step": 1276 + }, + { + "epoch": 2.6420689655172414, + "grad_norm": 0.6809141635894775, + "learning_rate": 4.231731187174065e-07, + "loss": 0.5486, + "step": 1277 + }, + { + "epoch": 2.644137931034483, + "grad_norm": 0.6723445653915405, + "learning_rate": 4.183364014435398e-07, + "loss": 0.5441, + "step": 1278 + }, + { + "epoch": 2.6462068965517243, + "grad_norm": 0.6648197174072266, + "learning_rate": 4.1352627718299953e-07, + "loss": 0.5506, + "step": 1279 + }, + { + "epoch": 2.6482758620689655, + "grad_norm": 0.668096661567688, + "learning_rate": 4.087427738548322e-07, + "loss": 0.5363, + "step": 1280 + }, + { + "epoch": 2.650344827586207, + "grad_norm": 0.6981197595596313, + "learning_rate": 4.0398591922357787e-07, + "loss": 0.5673, + "step": 1281 + }, + { + "epoch": 2.6524137931034484, + "grad_norm": 0.6475323438644409, + "learning_rate": 3.9925574089909603e-07, + "loss": 0.5402, + "step": 1282 + }, + { + "epoch": 2.6544827586206896, + "grad_norm": 0.640293300151825, + "learning_rate": 3.945522663364154e-07, + "loss": 0.5302, + "step": 1283 + }, + { + "epoch": 2.6565517241379313, + "grad_norm": 0.6663315296173096, + "learning_rate": 3.898755228355661e-07, + "loss": 0.5379, + "step": 1284 + }, + { + "epoch": 2.6586206896551725, + "grad_norm": 0.6679863929748535, + "learning_rate": 3.852255375414271e-07, + "loss": 0.5605, + "step": 1285 + }, + { + "epoch": 2.6606896551724137, + "grad_norm": 0.6763351559638977, + "learning_rate": 3.8060233744356634e-07, + "loss": 0.5102, + "step": 1286 + }, + { + "epoch": 2.6627586206896554, + "grad_norm": 0.6907029151916504, + "learning_rate": 3.760059493760848e-07, + "loss": 0.522, + "step": 1287 + }, + { + "epoch": 2.6648275862068966, + "grad_norm": 0.6595637202262878, + "learning_rate": 3.7143640001745916e-07, + "loss": 0.549, + "step": 1288 + }, + { + "epoch": 2.666896551724138, + "grad_norm": 
0.6693307161331177, + "learning_rate": 3.6689371589039013e-07, + "loss": 0.5257, + "step": 1289 + }, + { + "epoch": 2.6689655172413795, + "grad_norm": 0.6373291611671448, + "learning_rate": 3.6237792336164424e-07, + "loss": 0.5478, + "step": 1290 + }, + { + "epoch": 2.6710344827586208, + "grad_norm": 0.6235068440437317, + "learning_rate": 3.5788904864190346e-07, + "loss": 0.5484, + "step": 1291 + }, + { + "epoch": 2.673103448275862, + "grad_norm": 0.6607393026351929, + "learning_rate": 3.534271177856163e-07, + "loss": 0.5082, + "step": 1292 + }, + { + "epoch": 2.6751724137931037, + "grad_norm": 0.6282607316970825, + "learning_rate": 3.489921566908372e-07, + "loss": 0.5366, + "step": 1293 + }, + { + "epoch": 2.677241379310345, + "grad_norm": 0.6705973148345947, + "learning_rate": 3.445841910990877e-07, + "loss": 0.5382, + "step": 1294 + }, + { + "epoch": 2.679310344827586, + "grad_norm": 0.6531069278717041, + "learning_rate": 3.4020324659519487e-07, + "loss": 0.5556, + "step": 1295 + }, + { + "epoch": 2.681379310344828, + "grad_norm": 0.6508034467697144, + "learning_rate": 3.358493486071568e-07, + "loss": 0.5256, + "step": 1296 + }, + { + "epoch": 2.683448275862069, + "grad_norm": 0.6649227142333984, + "learning_rate": 3.315225224059809e-07, + "loss": 0.5104, + "step": 1297 + }, + { + "epoch": 2.6855172413793102, + "grad_norm": 0.6882157325744629, + "learning_rate": 3.2722279310554825e-07, + "loss": 0.542, + "step": 1298 + }, + { + "epoch": 2.687586206896552, + "grad_norm": 0.6619060039520264, + "learning_rate": 3.2295018566246007e-07, + "loss": 0.5379, + "step": 1299 + }, + { + "epoch": 2.689655172413793, + "grad_norm": 0.6553718447685242, + "learning_rate": 3.1870472487589845e-07, + "loss": 0.5122, + "step": 1300 + }, + { + "epoch": 2.6917241379310344, + "grad_norm": 0.691162645816803, + "learning_rate": 3.1448643538748045e-07, + "loss": 0.5102, + "step": 1301 + }, + { + "epoch": 2.693793103448276, + "grad_norm": 0.6377774477005005, + "learning_rate": 3.1029534168111217e-07, + "loss": 0.524, + "step": 1302 + }, + { + "epoch": 2.6958620689655173, + "grad_norm": 0.6641302704811096, + "learning_rate": 3.06131468082852e-07, + "loss": 0.5126, + "step": 1303 + }, + { + "epoch": 2.6979310344827585, + "grad_norm": 0.6569216251373291, + "learning_rate": 3.0199483876076465e-07, + "loss": 0.5347, + "step": 1304 + }, + { + "epoch": 2.7, + "grad_norm": 0.6541246175765991, + "learning_rate": 2.9788547772478416e-07, + "loss": 0.5384, + "step": 1305 + }, + { + "epoch": 2.7020689655172414, + "grad_norm": 0.6962401866912842, + "learning_rate": 2.938034088265723e-07, + "loss": 0.5321, + "step": 1306 + }, + { + "epoch": 2.7041379310344826, + "grad_norm": 0.6459115147590637, + "learning_rate": 2.897486557593826e-07, + "loss": 0.5477, + "step": 1307 + }, + { + "epoch": 2.7062068965517243, + "grad_norm": 0.6690937280654907, + "learning_rate": 2.8572124205791916e-07, + "loss": 0.5361, + "step": 1308 + }, + { + "epoch": 2.7082758620689655, + "grad_norm": 0.651241660118103, + "learning_rate": 2.817211910982037e-07, + "loss": 0.5231, + "step": 1309 + }, + { + "epoch": 2.7103448275862068, + "grad_norm": 0.6507266759872437, + "learning_rate": 2.7774852609743887e-07, + "loss": 0.5331, + "step": 1310 + }, + { + "epoch": 2.7124137931034484, + "grad_norm": 0.6396605968475342, + "learning_rate": 2.7380327011387096e-07, + "loss": 0.5418, + "step": 1311 + }, + { + "epoch": 2.7144827586206897, + "grad_norm": 0.635365903377533, + "learning_rate": 2.698854460466599e-07, + "loss": 0.5258, + "step": 1312 + }, + { + "epoch": 
2.716551724137931, + "grad_norm": 0.6772063374519348, + "learning_rate": 2.6599507663574387e-07, + "loss": 0.5274, + "step": 1313 + }, + { + "epoch": 2.7186206896551726, + "grad_norm": 0.6762843132019043, + "learning_rate": 2.621321844617092e-07, + "loss": 0.5279, + "step": 1314 + }, + { + "epoch": 2.720689655172414, + "grad_norm": 0.6602565050125122, + "learning_rate": 2.582967919456547e-07, + "loss": 0.5343, + "step": 1315 + }, + { + "epoch": 2.722758620689655, + "grad_norm": 0.6536930799484253, + "learning_rate": 2.544889213490698e-07, + "loss": 0.5146, + "step": 1316 + }, + { + "epoch": 2.7248275862068967, + "grad_norm": 0.6757822632789612, + "learning_rate": 2.5070859477369645e-07, + "loss": 0.5247, + "step": 1317 + }, + { + "epoch": 2.726896551724138, + "grad_norm": 0.6500410437583923, + "learning_rate": 2.46955834161407e-07, + "loss": 0.5383, + "step": 1318 + }, + { + "epoch": 2.728965517241379, + "grad_norm": 0.6390625834465027, + "learning_rate": 2.432306612940738e-07, + "loss": 0.5254, + "step": 1319 + }, + { + "epoch": 2.731034482758621, + "grad_norm": 0.6645777821540833, + "learning_rate": 2.39533097793444e-07, + "loss": 0.4968, + "step": 1320 + }, + { + "epoch": 2.733103448275862, + "grad_norm": 0.6638719439506531, + "learning_rate": 2.3586316512101416e-07, + "loss": 0.5147, + "step": 1321 + }, + { + "epoch": 2.7351724137931033, + "grad_norm": 0.6468044519424438, + "learning_rate": 2.3222088457790448e-07, + "loss": 0.5321, + "step": 1322 + }, + { + "epoch": 2.737241379310345, + "grad_norm": 0.6644803285598755, + "learning_rate": 2.286062773047354e-07, + "loss": 0.5462, + "step": 1323 + }, + { + "epoch": 2.739310344827586, + "grad_norm": 0.6668086647987366, + "learning_rate": 2.2501936428150794e-07, + "loss": 0.5093, + "step": 1324 + }, + { + "epoch": 2.7413793103448274, + "grad_norm": 0.6860660910606384, + "learning_rate": 2.2146016632747624e-07, + "loss": 0.5485, + "step": 1325 + }, + { + "epoch": 2.743448275862069, + "grad_norm": 0.6771222949028015, + "learning_rate": 2.1792870410103195e-07, + "loss": 0.5049, + "step": 1326 + }, + { + "epoch": 2.7455172413793103, + "grad_norm": 0.6570666432380676, + "learning_rate": 2.1442499809958273e-07, + "loss": 0.5263, + "step": 1327 + }, + { + "epoch": 2.7475862068965515, + "grad_norm": 0.6926378011703491, + "learning_rate": 2.109490686594301e-07, + "loss": 0.5532, + "step": 1328 + }, + { + "epoch": 2.749655172413793, + "grad_norm": 0.6724755764007568, + "learning_rate": 2.0750093595565735e-07, + "loss": 0.5282, + "step": 1329 + }, + { + "epoch": 2.7517241379310344, + "grad_norm": 0.6550016403198242, + "learning_rate": 2.0408062000200623e-07, + "loss": 0.5475, + "step": 1330 + }, + { + "epoch": 2.7537931034482757, + "grad_norm": 0.6652045249938965, + "learning_rate": 2.006881406507677e-07, + "loss": 0.5517, + "step": 1331 + }, + { + "epoch": 2.7558620689655173, + "grad_norm": 0.6777829527854919, + "learning_rate": 1.9732351759265743e-07, + "loss": 0.5755, + "step": 1332 + }, + { + "epoch": 2.7579310344827586, + "grad_norm": 0.632814347743988, + "learning_rate": 1.9398677035671222e-07, + "loss": 0.5004, + "step": 1333 + }, + { + "epoch": 2.76, + "grad_norm": 0.6533367037773132, + "learning_rate": 1.9067791831016647e-07, + "loss": 0.5314, + "step": 1334 + }, + { + "epoch": 2.7620689655172415, + "grad_norm": 0.6759463548660278, + "learning_rate": 1.8739698065834977e-07, + "loss": 0.5277, + "step": 1335 + }, + { + "epoch": 2.7641379310344827, + "grad_norm": 0.6170943975448608, + "learning_rate": 1.841439764445646e-07, + "loss": 0.5207, + 
"step": 1336 + }, + { + "epoch": 2.766206896551724, + "grad_norm": 0.6548056602478027, + "learning_rate": 1.8091892454998595e-07, + "loss": 0.5539, + "step": 1337 + }, + { + "epoch": 2.7682758620689656, + "grad_norm": 0.6568596363067627, + "learning_rate": 1.7772184369354517e-07, + "loss": 0.5267, + "step": 1338 + }, + { + "epoch": 2.770344827586207, + "grad_norm": 0.6602894067764282, + "learning_rate": 1.7455275243182413e-07, + "loss": 0.5589, + "step": 1339 + }, + { + "epoch": 2.772413793103448, + "grad_norm": 0.6518042087554932, + "learning_rate": 1.714116691589457e-07, + "loss": 0.518, + "step": 1340 + }, + { + "epoch": 2.7744827586206897, + "grad_norm": 0.6620248556137085, + "learning_rate": 1.682986121064689e-07, + "loss": 0.5521, + "step": 1341 + }, + { + "epoch": 2.776551724137931, + "grad_norm": 0.6624969840049744, + "learning_rate": 1.6521359934328185e-07, + "loss": 0.5467, + "step": 1342 + }, + { + "epoch": 2.778620689655172, + "grad_norm": 0.6587342619895935, + "learning_rate": 1.6215664877549774e-07, + "loss": 0.5063, + "step": 1343 + }, + { + "epoch": 2.780689655172414, + "grad_norm": 0.6892203092575073, + "learning_rate": 1.5912777814635018e-07, + "loss": 0.5463, + "step": 1344 + }, + { + "epoch": 2.782758620689655, + "grad_norm": 0.6598497033119202, + "learning_rate": 1.561270050360897e-07, + "loss": 0.5265, + "step": 1345 + }, + { + "epoch": 2.7848275862068963, + "grad_norm": 0.6603181958198547, + "learning_rate": 1.5315434686188345e-07, + "loss": 0.5467, + "step": 1346 + }, + { + "epoch": 2.786896551724138, + "grad_norm": 0.6583660840988159, + "learning_rate": 1.5020982087771362e-07, + "loss": 0.5537, + "step": 1347 + }, + { + "epoch": 2.788965517241379, + "grad_norm": 0.6598078012466431, + "learning_rate": 1.472934441742746e-07, + "loss": 0.5563, + "step": 1348 + }, + { + "epoch": 2.7910344827586204, + "grad_norm": 0.6481250524520874, + "learning_rate": 1.444052336788787e-07, + "loss": 0.5573, + "step": 1349 + }, + { + "epoch": 2.793103448275862, + "grad_norm": 0.6612368226051331, + "learning_rate": 1.4154520615535185e-07, + "loss": 0.5604, + "step": 1350 + }, + { + "epoch": 2.7951724137931033, + "grad_norm": 0.6412969827651978, + "learning_rate": 1.3871337820394404e-07, + "loss": 0.5592, + "step": 1351 + }, + { + "epoch": 2.7972413793103446, + "grad_norm": 0.6546990275382996, + "learning_rate": 1.359097662612252e-07, + "loss": 0.5295, + "step": 1352 + }, + { + "epoch": 2.7993103448275862, + "grad_norm": 0.6505655646324158, + "learning_rate": 1.33134386599994e-07, + "loss": 0.5208, + "step": 1353 + }, + { + "epoch": 2.8013793103448275, + "grad_norm": 0.6737064123153687, + "learning_rate": 1.303872553291824e-07, + "loss": 0.529, + "step": 1354 + }, + { + "epoch": 2.803448275862069, + "grad_norm": 0.6469187140464783, + "learning_rate": 1.2766838839376294e-07, + "loss": 0.5435, + "step": 1355 + }, + { + "epoch": 2.8055172413793104, + "grad_norm": 0.6671730279922485, + "learning_rate": 1.24977801574655e-07, + "loss": 0.5453, + "step": 1356 + }, + { + "epoch": 2.8075862068965516, + "grad_norm": 0.656061053276062, + "learning_rate": 1.2231551048863421e-07, + "loss": 0.5404, + "step": 1357 + }, + { + "epoch": 2.8096551724137933, + "grad_norm": 0.6444647312164307, + "learning_rate": 1.196815305882404e-07, + "loss": 0.5321, + "step": 1358 + }, + { + "epoch": 2.8117241379310345, + "grad_norm": 0.6776153445243835, + "learning_rate": 1.1707587716169033e-07, + "loss": 0.5226, + "step": 1359 + }, + { + "epoch": 2.8137931034482757, + "grad_norm": 0.6640626192092896, + "learning_rate": 
1.1449856533278624e-07, + "loss": 0.5381, + "step": 1360 + }, + { + "epoch": 2.8158620689655174, + "grad_norm": 0.6596142053604126, + "learning_rate": 1.1194961006082972e-07, + "loss": 0.52, + "step": 1361 + }, + { + "epoch": 2.8179310344827586, + "grad_norm": 0.6379115581512451, + "learning_rate": 1.0942902614053453e-07, + "loss": 0.5294, + "step": 1362 + }, + { + "epoch": 2.82, + "grad_norm": 0.6417571902275085, + "learning_rate": 1.0693682820194062e-07, + "loss": 0.56, + "step": 1363 + }, + { + "epoch": 2.8220689655172415, + "grad_norm": 0.672452986240387, + "learning_rate": 1.0447303071032977e-07, + "loss": 0.5091, + "step": 1364 + }, + { + "epoch": 2.8241379310344827, + "grad_norm": 0.6543784737586975, + "learning_rate": 1.0203764796614057e-07, + "loss": 0.5518, + "step": 1365 + }, + { + "epoch": 2.8262068965517244, + "grad_norm": 0.6682656407356262, + "learning_rate": 9.963069410488635e-08, + "loss": 0.5146, + "step": 1366 + }, + { + "epoch": 2.8282758620689656, + "grad_norm": 0.6736637949943542, + "learning_rate": 9.725218309707129e-08, + "loss": 0.5245, + "step": 1367 + }, + { + "epoch": 2.830344827586207, + "grad_norm": 0.6793366074562073, + "learning_rate": 9.490212874811388e-08, + "loss": 0.5301, + "step": 1368 + }, + { + "epoch": 2.8324137931034485, + "grad_norm": 0.6533759832382202, + "learning_rate": 9.258054469825972e-08, + "loss": 0.5245, + "step": 1369 + }, + { + "epoch": 2.8344827586206898, + "grad_norm": 0.6463536024093628, + "learning_rate": 9.028744442251103e-08, + "loss": 0.5174, + "step": 1370 + }, + { + "epoch": 2.836551724137931, + "grad_norm": 0.691880464553833, + "learning_rate": 8.802284123053894e-08, + "loss": 0.5345, + "step": 1371 + }, + { + "epoch": 2.8386206896551727, + "grad_norm": 0.6536058187484741, + "learning_rate": 8.578674826661527e-08, + "loss": 0.5371, + "step": 1372 + }, + { + "epoch": 2.840689655172414, + "grad_norm": 0.658385157585144, + "learning_rate": 8.357917850952802e-08, + "loss": 0.5064, + "step": 1373 + }, + { + "epoch": 2.842758620689655, + "grad_norm": 0.662955105304718, + "learning_rate": 8.140014477251379e-08, + "loss": 0.5247, + "step": 1374 + }, + { + "epoch": 2.844827586206897, + "grad_norm": 0.6408557295799255, + "learning_rate": 7.924965970317722e-08, + "loss": 0.5348, + "step": 1375 + }, + { + "epoch": 2.846896551724138, + "grad_norm": 0.6421958208084106, + "learning_rate": 7.712773578342047e-08, + "loss": 0.5583, + "step": 1376 + }, + { + "epoch": 2.8489655172413793, + "grad_norm": 0.6565936803817749, + "learning_rate": 7.503438532937169e-08, + "loss": 0.5502, + "step": 1377 + }, + { + "epoch": 2.851034482758621, + "grad_norm": 0.6656866073608398, + "learning_rate": 7.296962049131051e-08, + "loss": 0.5256, + "step": 1378 + }, + { + "epoch": 2.853103448275862, + "grad_norm": 0.662927508354187, + "learning_rate": 7.093345325359935e-08, + "loss": 0.5399, + "step": 1379 + }, + { + "epoch": 2.8551724137931034, + "grad_norm": 0.664005696773529, + "learning_rate": 6.892589543461392e-08, + "loss": 0.5233, + "step": 1380 + }, + { + "epoch": 2.857241379310345, + "grad_norm": 0.6524354219436646, + "learning_rate": 6.694695868667556e-08, + "loss": 0.5318, + "step": 1381 + }, + { + "epoch": 2.8593103448275863, + "grad_norm": 0.6665657758712769, + "learning_rate": 6.49966544959807e-08, + "loss": 0.5241, + "step": 1382 + }, + { + "epoch": 2.8613793103448275, + "grad_norm": 0.6510589122772217, + "learning_rate": 6.307499418253705e-08, + "loss": 0.5293, + "step": 1383 + }, + { + "epoch": 2.863448275862069, + "grad_norm": 0.6608887910842896, + 
"learning_rate": 6.118198890009586e-08, + "loss": 0.5257, + "step": 1384 + }, + { + "epoch": 2.8655172413793104, + "grad_norm": 0.6739917993545532, + "learning_rate": 5.9317649636088656e-08, + "loss": 0.5544, + "step": 1385 + }, + { + "epoch": 2.8675862068965516, + "grad_norm": 0.6483108401298523, + "learning_rate": 5.748198721156284e-08, + "loss": 0.5297, + "step": 1386 + }, + { + "epoch": 2.8696551724137933, + "grad_norm": 0.653653621673584, + "learning_rate": 5.5675012281119486e-08, + "loss": 0.5445, + "step": 1387 + }, + { + "epoch": 2.8717241379310345, + "grad_norm": 0.6576898097991943, + "learning_rate": 5.389673533284956e-08, + "loss": 0.4969, + "step": 1388 + }, + { + "epoch": 2.8737931034482758, + "grad_norm": 0.6513156294822693, + "learning_rate": 5.214716668827558e-08, + "loss": 0.547, + "step": 1389 + }, + { + "epoch": 2.8758620689655174, + "grad_norm": 0.6595351099967957, + "learning_rate": 5.042631650229057e-08, + "loss": 0.5567, + "step": 1390 + }, + { + "epoch": 2.8779310344827587, + "grad_norm": 0.644473671913147, + "learning_rate": 4.8734194763098706e-08, + "loss": 0.5005, + "step": 1391 + }, + { + "epoch": 2.88, + "grad_norm": 0.6628778576850891, + "learning_rate": 4.7070811292157513e-08, + "loss": 0.5162, + "step": 1392 + }, + { + "epoch": 2.8820689655172416, + "grad_norm": 0.6727180480957031, + "learning_rate": 4.543617574412185e-08, + "loss": 0.5441, + "step": 1393 + }, + { + "epoch": 2.884137931034483, + "grad_norm": 0.666273832321167, + "learning_rate": 4.383029760678614e-08, + "loss": 0.5409, + "step": 1394 + }, + { + "epoch": 2.886206896551724, + "grad_norm": 0.6508522629737854, + "learning_rate": 4.2253186201031135e-08, + "loss": 0.5193, + "step": 1395 + }, + { + "epoch": 2.8882758620689657, + "grad_norm": 0.6973995566368103, + "learning_rate": 4.07048506807689e-08, + "loss": 0.5415, + "step": 1396 + }, + { + "epoch": 2.890344827586207, + "grad_norm": 0.6525006890296936, + "learning_rate": 3.9185300032889005e-08, + "loss": 0.5379, + "step": 1397 + }, + { + "epoch": 2.892413793103448, + "grad_norm": 0.677098274230957, + "learning_rate": 3.7694543077208546e-08, + "loss": 0.5413, + "step": 1398 + }, + { + "epoch": 2.89448275862069, + "grad_norm": 0.6358975172042847, + "learning_rate": 3.6232588466417195e-08, + "loss": 0.5449, + "step": 1399 + }, + { + "epoch": 2.896551724137931, + "grad_norm": 0.6577171087265015, + "learning_rate": 3.479944468603169e-08, + "loss": 0.5363, + "step": 1400 + }, + { + "epoch": 2.8986206896551723, + "grad_norm": 0.6604806780815125, + "learning_rate": 3.339512005434309e-08, + "loss": 0.5307, + "step": 1401 + }, + { + "epoch": 2.900689655172414, + "grad_norm": 0.6712777614593506, + "learning_rate": 3.2019622722369024e-08, + "loss": 0.537, + "step": 1402 + }, + { + "epoch": 2.902758620689655, + "grad_norm": 0.6645734310150146, + "learning_rate": 3.0672960673808205e-08, + "loss": 0.5775, + "step": 1403 + }, + { + "epoch": 2.9048275862068964, + "grad_norm": 0.6652960777282715, + "learning_rate": 2.93551417249921e-08, + "loss": 0.5579, + "step": 1404 + }, + { + "epoch": 2.906896551724138, + "grad_norm": 0.65793377161026, + "learning_rate": 2.8066173524839978e-08, + "loss": 0.5572, + "step": 1405 + }, + { + "epoch": 2.9089655172413793, + "grad_norm": 0.6762325167655945, + "learning_rate": 2.6806063554815632e-08, + "loss": 0.5502, + "step": 1406 + }, + { + "epoch": 2.9110344827586205, + "grad_norm": 0.6445053219795227, + "learning_rate": 2.5574819128882933e-08, + "loss": 0.5287, + "step": 1407 + }, + { + "epoch": 2.913103448275862, + 
"grad_norm": 0.651478111743927, + "learning_rate": 2.4372447393462562e-08, + "loss": 0.5344, + "step": 1408 + }, + { + "epoch": 2.9151724137931034, + "grad_norm": 0.6526092290878296, + "learning_rate": 2.319895532739369e-08, + "loss": 0.5081, + "step": 1409 + }, + { + "epoch": 2.9172413793103447, + "grad_norm": 0.6735363602638245, + "learning_rate": 2.205434974188847e-08, + "loss": 0.5468, + "step": 1410 + }, + { + "epoch": 2.9193103448275863, + "grad_norm": 0.6453359127044678, + "learning_rate": 2.093863728049872e-08, + "loss": 0.5245, + "step": 1411 + }, + { + "epoch": 2.9213793103448276, + "grad_norm": 0.6538481116294861, + "learning_rate": 1.9851824419070965e-08, + "loss": 0.5291, + "step": 1412 + }, + { + "epoch": 2.923448275862069, + "grad_norm": 0.6372977495193481, + "learning_rate": 1.8793917465713686e-08, + "loss": 0.5489, + "step": 1413 + }, + { + "epoch": 2.9255172413793105, + "grad_norm": 0.6539329290390015, + "learning_rate": 1.7764922560759014e-08, + "loss": 0.5307, + "step": 1414 + }, + { + "epoch": 2.9275862068965517, + "grad_norm": 0.6570875644683838, + "learning_rate": 1.676484567672554e-08, + "loss": 0.5509, + "step": 1415 + }, + { + "epoch": 2.929655172413793, + "grad_norm": 0.6732608079910278, + "learning_rate": 1.5793692618286115e-08, + "loss": 0.5501, + "step": 1416 + }, + { + "epoch": 2.9317241379310346, + "grad_norm": 0.638278067111969, + "learning_rate": 1.4851469022234e-08, + "loss": 0.5377, + "step": 1417 + }, + { + "epoch": 2.933793103448276, + "grad_norm": 0.6890416741371155, + "learning_rate": 1.3938180357447318e-08, + "loss": 0.503, + "step": 1418 + }, + { + "epoch": 2.935862068965517, + "grad_norm": 0.6768400073051453, + "learning_rate": 1.3053831924861315e-08, + "loss": 0.531, + "step": 1419 + }, + { + "epoch": 2.9379310344827587, + "grad_norm": 0.6668310165405273, + "learning_rate": 1.2198428857433941e-08, + "loss": 0.5394, + "step": 1420 + }, + { + "epoch": 2.94, + "grad_norm": 0.6531161665916443, + "learning_rate": 1.137197612011809e-08, + "loss": 0.526, + "step": 1421 + }, + { + "epoch": 2.942068965517241, + "grad_norm": 0.6525026559829712, + "learning_rate": 1.0574478509833286e-08, + "loss": 0.5175, + "step": 1422 + }, + { + "epoch": 2.944137931034483, + "grad_norm": 0.6503228545188904, + "learning_rate": 9.805940655436274e-09, + "loss": 0.534, + "step": 1423 + }, + { + "epoch": 2.946206896551724, + "grad_norm": 0.6437545418739319, + "learning_rate": 9.066367017694366e-09, + "loss": 0.5318, + "step": 1424 + }, + { + "epoch": 2.9482758620689653, + "grad_norm": 0.6803710460662842, + "learning_rate": 8.35576188926046e-09, + "loss": 0.5196, + "step": 1425 + }, + { + "epoch": 2.950344827586207, + "grad_norm": 0.6403658390045166, + "learning_rate": 7.674129394648623e-09, + "loss": 0.5252, + "step": 1426 + }, + { + "epoch": 2.952413793103448, + "grad_norm": 0.6625465750694275, + "learning_rate": 7.0214734902074314e-09, + "loss": 0.5389, + "step": 1427 + }, + { + "epoch": 2.9544827586206894, + "grad_norm": 0.6770764589309692, + "learning_rate": 6.3977979640994504e-09, + "loss": 0.5674, + "step": 1428 + }, + { + "epoch": 2.956551724137931, + "grad_norm": 0.6394649147987366, + "learning_rate": 5.803106436279571e-09, + "loss": 0.5134, + "step": 1429 + }, + { + "epoch": 2.9586206896551723, + "grad_norm": 0.6759665608406067, + "learning_rate": 5.237402358471144e-09, + "loss": 0.5349, + "step": 1430 + }, + { + "epoch": 2.9606896551724136, + "grad_norm": 0.6301043629646301, + "learning_rate": 4.700689014149329e-09, + "loss": 0.5539, + "step": 1431 + }, + { + 
"epoch": 2.9627586206896552, + "grad_norm": 0.6656786799430847, + "learning_rate": 4.192969518519441e-09, + "loss": 0.534, + "step": 1432 + }, + { + "epoch": 2.9648275862068965, + "grad_norm": 0.681606113910675, + "learning_rate": 3.71424681850141e-09, + "loss": 0.5136, + "step": 1433 + }, + { + "epoch": 2.9668965517241377, + "grad_norm": 0.6622211933135986, + "learning_rate": 3.2645236927092426e-09, + "loss": 0.5406, + "step": 1434 + }, + { + "epoch": 2.9689655172413794, + "grad_norm": 0.6423860192298889, + "learning_rate": 2.8438027514382517e-09, + "loss": 0.5265, + "step": 1435 + }, + { + "epoch": 2.9710344827586206, + "grad_norm": 0.6584990620613098, + "learning_rate": 2.452086436648404e-09, + "loss": 0.5342, + "step": 1436 + }, + { + "epoch": 2.973103448275862, + "grad_norm": 0.6501452922821045, + "learning_rate": 2.0893770219493347e-09, + "loss": 0.544, + "step": 1437 + }, + { + "epoch": 2.9751724137931035, + "grad_norm": 0.6575037240982056, + "learning_rate": 1.755676612588686e-09, + "loss": 0.5459, + "step": 1438 + }, + { + "epoch": 2.9772413793103447, + "grad_norm": 0.6507884860038757, + "learning_rate": 1.450987145439342e-09, + "loss": 0.5337, + "step": 1439 + }, + { + "epoch": 2.979310344827586, + "grad_norm": 0.6674343347549438, + "learning_rate": 1.1753103889883267e-09, + "loss": 0.5298, + "step": 1440 + }, + { + "epoch": 2.9813793103448276, + "grad_norm": 0.6635849475860596, + "learning_rate": 9.286479433257e-10, + "loss": 0.5505, + "step": 1441 + }, + { + "epoch": 2.983448275862069, + "grad_norm": 0.6534397006034851, + "learning_rate": 7.110012401362332e-10, + "loss": 0.5287, + "step": 1442 + }, + { + "epoch": 2.98551724137931, + "grad_norm": 0.6629782319068909, + "learning_rate": 5.2237154268997e-10, + "loss": 0.5412, + "step": 1443 + }, + { + "epoch": 2.9875862068965517, + "grad_norm": 0.628498375415802, + "learning_rate": 3.627599458377873e-10, + "loss": 0.5274, + "step": 1444 + }, + { + "epoch": 2.989655172413793, + "grad_norm": 0.6589900851249695, + "learning_rate": 2.321673760002918e-10, + "loss": 0.5285, + "step": 1445 + }, + { + "epoch": 2.9917241379310346, + "grad_norm": 0.6807800531387329, + "learning_rate": 1.305945911672657e-10, + "loss": 0.5263, + "step": 1446 + }, + { + "epoch": 2.993793103448276, + "grad_norm": 0.6564441919326782, + "learning_rate": 5.804218088933944e-11, + "loss": 0.5142, + "step": 1447 + }, + { + "epoch": 2.995862068965517, + "grad_norm": 0.6432551145553589, + "learning_rate": 1.4510566277992077e-11, + "loss": 0.5098, + "step": 1448 + }, + { + "epoch": 2.9979310344827588, + "grad_norm": 0.6673936247825623, + "learning_rate": 0.0, + "loss": 0.5185, + "step": 1449 + }, + { + "epoch": 2.9979310344827588, + "step": 1449, + "total_flos": 7.092133536927252e+17, + "train_loss": 0.6742002179780608, + "train_runtime": 25544.3965, + "train_samples_per_second": 5.449, + "train_steps_per_second": 0.057 + } + ], + "logging_steps": 1, + "max_steps": 1449, + "num_input_tokens_seen": 0, + "num_train_epochs": 3, + "save_steps": 500, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": true + }, + "attributes": {} + } + }, + "total_flos": 7.092133536927252e+17, + "train_batch_size": 1, + "trial_name": null, + "trial_params": null +}