diff --git "a/Flan-T5-Typosquat-detect/trainer_state.json" "b/Flan-T5-Typosquat-detect/trainer_state.json" new file mode 100644--- /dev/null +++ "b/Flan-T5-Typosquat-detect/trainer_state.json" @@ -0,0 +1,14065 @@ +{ + "best_metric": 0.012842352502048016, + "best_model_checkpoint": "typosquat_flan_model_peft/checkpoint-20000", + "epoch": 4.0, + "eval_steps": 500, + "global_step": 20000, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.002, + "grad_norm": 2.9712789058685303, + "learning_rate": 4.9980000000000006e-05, + "loss": 4.4769, + "step": 10 + }, + { + "epoch": 0.004, + "grad_norm": 2.7549290657043457, + "learning_rate": 4.996e-05, + "loss": 4.1624, + "step": 20 + }, + { + "epoch": 0.006, + "grad_norm": 3.280527114868164, + "learning_rate": 4.9940000000000006e-05, + "loss": 3.9022, + "step": 30 + }, + { + "epoch": 0.008, + "grad_norm": 3.6827425956726074, + "learning_rate": 4.992e-05, + "loss": 3.289, + "step": 40 + }, + { + "epoch": 0.01, + "grad_norm": 4.800412178039551, + "learning_rate": 4.99e-05, + "loss": 2.6414, + "step": 50 + }, + { + "epoch": 0.012, + "grad_norm": 3.7369704246520996, + "learning_rate": 4.9880000000000004e-05, + "loss": 1.8745, + "step": 60 + }, + { + "epoch": 0.014, + "grad_norm": 4.058594226837158, + "learning_rate": 4.986e-05, + "loss": 1.1633, + "step": 70 + }, + { + "epoch": 0.016, + "grad_norm": 4.471256256103516, + "learning_rate": 4.9840000000000004e-05, + "loss": 0.5817, + "step": 80 + }, + { + "epoch": 0.018, + "grad_norm": 2.974433660507202, + "learning_rate": 4.982e-05, + "loss": 0.2911, + "step": 90 + }, + { + "epoch": 0.02, + "grad_norm": 0.5866895914077759, + "learning_rate": 4.9800000000000004e-05, + "loss": 0.3218, + "step": 100 + }, + { + "epoch": 0.022, + "grad_norm": 0.8520813584327698, + "learning_rate": 4.978e-05, + "loss": 0.2104, + "step": 110 + }, + { + "epoch": 0.024, + "grad_norm": 0.2147858440876007, + "learning_rate": 4.976e-05, + "loss": 0.1706, + "step": 120 + }, + { + "epoch": 0.026, + "grad_norm": 3.007725238800049, + "learning_rate": 4.974e-05, + "loss": 0.3214, + "step": 130 + }, + { + "epoch": 0.028, + "grad_norm": 0.693766713142395, + "learning_rate": 4.972e-05, + "loss": 0.137, + "step": 140 + }, + { + "epoch": 0.03, + "grad_norm": 1.4026163816452026, + "learning_rate": 4.97e-05, + "loss": 0.1514, + "step": 150 + }, + { + "epoch": 0.032, + "grad_norm": 1.3514808416366577, + "learning_rate": 4.9680000000000005e-05, + "loss": 0.2081, + "step": 160 + }, + { + "epoch": 0.034, + "grad_norm": 1.0751250982284546, + "learning_rate": 4.966e-05, + "loss": 0.1561, + "step": 170 + }, + { + "epoch": 0.036, + "grad_norm": 1.2864580154418945, + "learning_rate": 4.9640000000000006e-05, + "loss": 0.0576, + "step": 180 + }, + { + "epoch": 0.038, + "grad_norm": 3.6556406021118164, + "learning_rate": 4.962e-05, + "loss": 0.2457, + "step": 190 + }, + { + "epoch": 0.04, + "grad_norm": 2.740448236465454, + "learning_rate": 4.96e-05, + "loss": 0.2094, + "step": 200 + }, + { + "epoch": 0.042, + "grad_norm": 1.7056159973144531, + "learning_rate": 4.958e-05, + "loss": 0.1938, + "step": 210 + }, + { + "epoch": 0.044, + "grad_norm": 0.500120997428894, + "learning_rate": 4.956e-05, + "loss": 0.2285, + "step": 220 + }, + { + "epoch": 0.046, + "grad_norm": 1.6456410884857178, + "learning_rate": 4.9540000000000003e-05, + "loss": 0.2164, + "step": 230 + }, + { + "epoch": 0.048, + "grad_norm": 0.29337406158447266, + "learning_rate": 4.952e-05, + "loss": 0.1568, + "step": 
240 + }, + { + "epoch": 0.05, + "grad_norm": 1.0422818660736084, + "learning_rate": 4.9500000000000004e-05, + "loss": 0.1407, + "step": 250 + }, + { + "epoch": 0.052, + "grad_norm": 3.711244821548462, + "learning_rate": 4.948000000000001e-05, + "loss": 0.2156, + "step": 260 + }, + { + "epoch": 0.054, + "grad_norm": 1.4521507024765015, + "learning_rate": 4.946e-05, + "loss": 0.1499, + "step": 270 + }, + { + "epoch": 0.056, + "grad_norm": 1.1018396615982056, + "learning_rate": 4.944e-05, + "loss": 0.1733, + "step": 280 + }, + { + "epoch": 0.058, + "grad_norm": 3.3761978149414062, + "learning_rate": 4.942e-05, + "loss": 0.2515, + "step": 290 + }, + { + "epoch": 0.06, + "grad_norm": 1.1806458234786987, + "learning_rate": 4.94e-05, + "loss": 0.1081, + "step": 300 + }, + { + "epoch": 0.062, + "grad_norm": 1.5186078548431396, + "learning_rate": 4.9380000000000005e-05, + "loss": 0.1387, + "step": 310 + }, + { + "epoch": 0.064, + "grad_norm": 0.5397683382034302, + "learning_rate": 4.936e-05, + "loss": 0.087, + "step": 320 + }, + { + "epoch": 0.066, + "grad_norm": 0.28479281067848206, + "learning_rate": 4.9340000000000005e-05, + "loss": 0.0819, + "step": 330 + }, + { + "epoch": 0.068, + "grad_norm": 0.3176600933074951, + "learning_rate": 4.932e-05, + "loss": 0.1401, + "step": 340 + }, + { + "epoch": 0.07, + "grad_norm": 0.14727644622325897, + "learning_rate": 4.93e-05, + "loss": 0.1139, + "step": 350 + }, + { + "epoch": 0.072, + "grad_norm": 0.6405311226844788, + "learning_rate": 4.928e-05, + "loss": 0.1413, + "step": 360 + }, + { + "epoch": 0.074, + "grad_norm": 2.0455615520477295, + "learning_rate": 4.926e-05, + "loss": 0.102, + "step": 370 + }, + { + "epoch": 0.076, + "grad_norm": 1.2913883924484253, + "learning_rate": 4.924e-05, + "loss": 0.0346, + "step": 380 + }, + { + "epoch": 0.078, + "grad_norm": 0.3396105170249939, + "learning_rate": 4.9220000000000006e-05, + "loss": 0.1247, + "step": 390 + }, + { + "epoch": 0.08, + "grad_norm": 0.8023151159286499, + "learning_rate": 4.92e-05, + "loss": 0.0915, + "step": 400 + }, + { + "epoch": 0.082, + "grad_norm": 2.867755651473999, + "learning_rate": 4.918000000000001e-05, + "loss": 0.0638, + "step": 410 + }, + { + "epoch": 0.084, + "grad_norm": 4.464131832122803, + "learning_rate": 4.9160000000000004e-05, + "loss": 0.1038, + "step": 420 + }, + { + "epoch": 0.086, + "grad_norm": 0.06573939323425293, + "learning_rate": 4.914e-05, + "loss": 0.0835, + "step": 430 + }, + { + "epoch": 0.088, + "grad_norm": 3.679654121398926, + "learning_rate": 4.9120000000000004e-05, + "loss": 0.0967, + "step": 440 + }, + { + "epoch": 0.09, + "grad_norm": 0.6470697522163391, + "learning_rate": 4.91e-05, + "loss": 0.1244, + "step": 450 + }, + { + "epoch": 0.092, + "grad_norm": 0.11164885014295578, + "learning_rate": 4.9080000000000004e-05, + "loss": 0.2084, + "step": 460 + }, + { + "epoch": 0.094, + "grad_norm": 0.10385742038488388, + "learning_rate": 4.906e-05, + "loss": 0.0626, + "step": 470 + }, + { + "epoch": 0.096, + "grad_norm": 2.7525277137756348, + "learning_rate": 4.9040000000000005e-05, + "loss": 0.07, + "step": 480 + }, + { + "epoch": 0.098, + "grad_norm": 0.11531264334917068, + "learning_rate": 4.902e-05, + "loss": 0.0261, + "step": 490 + }, + { + "epoch": 0.1, + "grad_norm": 3.795445203781128, + "learning_rate": 4.9e-05, + "loss": 0.1517, + "step": 500 + }, + { + "epoch": 0.102, + "grad_norm": 0.14794982969760895, + "learning_rate": 4.898e-05, + "loss": 0.1346, + "step": 510 + }, + { + "epoch": 0.104, + "grad_norm": 3.277275800704956, + "learning_rate": 
4.896e-05, + "loss": 0.0195, + "step": 520 + }, + { + "epoch": 0.106, + "grad_norm": 5.350284099578857, + "learning_rate": 4.894e-05, + "loss": 0.1441, + "step": 530 + }, + { + "epoch": 0.108, + "grad_norm": 0.12020950764417648, + "learning_rate": 4.8920000000000006e-05, + "loss": 0.0465, + "step": 540 + }, + { + "epoch": 0.11, + "grad_norm": 0.7666958570480347, + "learning_rate": 4.89e-05, + "loss": 0.0169, + "step": 550 + }, + { + "epoch": 0.112, + "grad_norm": 2.0386059284210205, + "learning_rate": 4.8880000000000006e-05, + "loss": 0.1544, + "step": 560 + }, + { + "epoch": 0.114, + "grad_norm": 0.1720697283744812, + "learning_rate": 4.886e-05, + "loss": 0.0535, + "step": 570 + }, + { + "epoch": 0.116, + "grad_norm": 0.04188171774148941, + "learning_rate": 4.884e-05, + "loss": 0.0547, + "step": 580 + }, + { + "epoch": 0.118, + "grad_norm": 0.36014607548713684, + "learning_rate": 4.8820000000000004e-05, + "loss": 0.0707, + "step": 590 + }, + { + "epoch": 0.12, + "grad_norm": 0.06317629665136337, + "learning_rate": 4.88e-05, + "loss": 0.0984, + "step": 600 + }, + { + "epoch": 0.122, + "grad_norm": 0.04827451333403587, + "learning_rate": 4.8780000000000004e-05, + "loss": 0.0322, + "step": 610 + }, + { + "epoch": 0.124, + "grad_norm": 0.09840811043977737, + "learning_rate": 4.876e-05, + "loss": 0.0579, + "step": 620 + }, + { + "epoch": 0.126, + "grad_norm": 1.4731272459030151, + "learning_rate": 4.8740000000000004e-05, + "loss": 0.0203, + "step": 630 + }, + { + "epoch": 0.128, + "grad_norm": 0.04075102135539055, + "learning_rate": 4.872000000000001e-05, + "loss": 0.0479, + "step": 640 + }, + { + "epoch": 0.13, + "grad_norm": 2.8817827701568604, + "learning_rate": 4.87e-05, + "loss": 0.0964, + "step": 650 + }, + { + "epoch": 0.132, + "grad_norm": 3.602795124053955, + "learning_rate": 4.868e-05, + "loss": 0.0772, + "step": 660 + }, + { + "epoch": 0.134, + "grad_norm": 7.180420875549316, + "learning_rate": 4.866e-05, + "loss": 0.0225, + "step": 670 + }, + { + "epoch": 0.136, + "grad_norm": 0.40698742866516113, + "learning_rate": 4.864e-05, + "loss": 0.0883, + "step": 680 + }, + { + "epoch": 0.138, + "grad_norm": 4.945473670959473, + "learning_rate": 4.8620000000000005e-05, + "loss": 0.106, + "step": 690 + }, + { + "epoch": 0.14, + "grad_norm": 0.16914227604866028, + "learning_rate": 4.86e-05, + "loss": 0.1672, + "step": 700 + }, + { + "epoch": 0.142, + "grad_norm": 0.5895906090736389, + "learning_rate": 4.8580000000000006e-05, + "loss": 0.0754, + "step": 710 + }, + { + "epoch": 0.144, + "grad_norm": 0.2772788107395172, + "learning_rate": 4.856e-05, + "loss": 0.0339, + "step": 720 + }, + { + "epoch": 0.146, + "grad_norm": 0.1928812563419342, + "learning_rate": 4.854e-05, + "loss": 0.0592, + "step": 730 + }, + { + "epoch": 0.148, + "grad_norm": 0.046394895762205124, + "learning_rate": 4.852e-05, + "loss": 0.0454, + "step": 740 + }, + { + "epoch": 0.15, + "grad_norm": 4.180880069732666, + "learning_rate": 4.85e-05, + "loss": 0.1045, + "step": 750 + }, + { + "epoch": 0.152, + "grad_norm": 3.7465639114379883, + "learning_rate": 4.8480000000000003e-05, + "loss": 0.0181, + "step": 760 + }, + { + "epoch": 0.154, + "grad_norm": 0.293394535779953, + "learning_rate": 4.846e-05, + "loss": 0.0094, + "step": 770 + }, + { + "epoch": 0.156, + "grad_norm": 0.0170027744024992, + "learning_rate": 4.8440000000000004e-05, + "loss": 0.0174, + "step": 780 + }, + { + "epoch": 0.158, + "grad_norm": 0.021193701773881912, + "learning_rate": 4.842000000000001e-05, + "loss": 0.1072, + "step": 790 + }, + { + "epoch": 0.16, 
+ "grad_norm": 0.5707680583000183, + "learning_rate": 4.8400000000000004e-05, + "loss": 0.0646, + "step": 800 + }, + { + "epoch": 0.162, + "grad_norm": 0.08426664769649506, + "learning_rate": 4.838e-05, + "loss": 0.1026, + "step": 810 + }, + { + "epoch": 0.164, + "grad_norm": 1.9383238554000854, + "learning_rate": 4.836e-05, + "loss": 0.0185, + "step": 820 + }, + { + "epoch": 0.166, + "grad_norm": 0.04476593807339668, + "learning_rate": 4.834e-05, + "loss": 0.0397, + "step": 830 + }, + { + "epoch": 0.168, + "grad_norm": 7.816348552703857, + "learning_rate": 4.8320000000000005e-05, + "loss": 0.1227, + "step": 840 + }, + { + "epoch": 0.17, + "grad_norm": 0.10927680134773254, + "learning_rate": 4.83e-05, + "loss": 0.0911, + "step": 850 + }, + { + "epoch": 0.172, + "grad_norm": 0.10985364019870758, + "learning_rate": 4.8280000000000005e-05, + "loss": 0.0128, + "step": 860 + }, + { + "epoch": 0.174, + "grad_norm": 1.405458927154541, + "learning_rate": 4.826e-05, + "loss": 0.0182, + "step": 870 + }, + { + "epoch": 0.176, + "grad_norm": 3.5338621139526367, + "learning_rate": 4.824e-05, + "loss": 0.0624, + "step": 880 + }, + { + "epoch": 0.178, + "grad_norm": 0.1506468653678894, + "learning_rate": 4.822e-05, + "loss": 0.0269, + "step": 890 + }, + { + "epoch": 0.18, + "grad_norm": 0.00646029831841588, + "learning_rate": 4.82e-05, + "loss": 0.0407, + "step": 900 + }, + { + "epoch": 0.182, + "grad_norm": 0.12899932265281677, + "learning_rate": 4.818e-05, + "loss": 0.0659, + "step": 910 + }, + { + "epoch": 0.184, + "grad_norm": 2.2137913703918457, + "learning_rate": 4.816e-05, + "loss": 0.0145, + "step": 920 + }, + { + "epoch": 0.186, + "grad_norm": 0.20166102051734924, + "learning_rate": 4.814e-05, + "loss": 0.0912, + "step": 930 + }, + { + "epoch": 0.188, + "grad_norm": 0.6006794571876526, + "learning_rate": 4.812000000000001e-05, + "loss": 0.018, + "step": 940 + }, + { + "epoch": 0.19, + "grad_norm": 0.8807332515716553, + "learning_rate": 4.8100000000000004e-05, + "loss": 0.0028, + "step": 950 + }, + { + "epoch": 0.192, + "grad_norm": 0.03953103348612785, + "learning_rate": 4.808e-05, + "loss": 0.0028, + "step": 960 + }, + { + "epoch": 0.194, + "grad_norm": 0.0035690057557076216, + "learning_rate": 4.8060000000000004e-05, + "loss": 0.018, + "step": 970 + }, + { + "epoch": 0.196, + "grad_norm": 0.009828136302530766, + "learning_rate": 4.804e-05, + "loss": 0.027, + "step": 980 + }, + { + "epoch": 0.198, + "grad_norm": 0.19281725585460663, + "learning_rate": 4.8020000000000004e-05, + "loss": 0.0752, + "step": 990 + }, + { + "epoch": 0.2, + "grad_norm": 0.008602121844887733, + "learning_rate": 4.8e-05, + "loss": 0.2189, + "step": 1000 + }, + { + "epoch": 0.202, + "grad_norm": 3.5480191707611084, + "learning_rate": 4.7980000000000005e-05, + "loss": 0.197, + "step": 1010 + }, + { + "epoch": 0.204, + "grad_norm": 0.06215432286262512, + "learning_rate": 4.796e-05, + "loss": 0.0947, + "step": 1020 + }, + { + "epoch": 0.206, + "grad_norm": 0.5615971684455872, + "learning_rate": 4.794e-05, + "loss": 0.037, + "step": 1030 + }, + { + "epoch": 0.208, + "grad_norm": 0.30555933713912964, + "learning_rate": 4.792e-05, + "loss": 0.0095, + "step": 1040 + }, + { + "epoch": 0.21, + "grad_norm": 0.0339435338973999, + "learning_rate": 4.79e-05, + "loss": 0.0199, + "step": 1050 + }, + { + "epoch": 0.212, + "grad_norm": 0.011141384951770306, + "learning_rate": 4.788e-05, + "loss": 0.0678, + "step": 1060 + }, + { + "epoch": 0.214, + "grad_norm": 0.003661577822640538, + "learning_rate": 4.7860000000000006e-05, + "loss": 
0.163, + "step": 1070 + }, + { + "epoch": 0.216, + "grad_norm": 0.01145030278712511, + "learning_rate": 4.784e-05, + "loss": 0.0227, + "step": 1080 + }, + { + "epoch": 0.218, + "grad_norm": 0.08786927908658981, + "learning_rate": 4.7820000000000006e-05, + "loss": 0.0769, + "step": 1090 + }, + { + "epoch": 0.22, + "grad_norm": 0.008924941532313824, + "learning_rate": 4.78e-05, + "loss": 0.0196, + "step": 1100 + }, + { + "epoch": 0.222, + "grad_norm": 4.367251873016357, + "learning_rate": 4.778e-05, + "loss": 0.0152, + "step": 1110 + }, + { + "epoch": 0.224, + "grad_norm": 0.030621623620390892, + "learning_rate": 4.7760000000000004e-05, + "loss": 0.0948, + "step": 1120 + }, + { + "epoch": 0.226, + "grad_norm": 0.0778178870677948, + "learning_rate": 4.774e-05, + "loss": 0.0352, + "step": 1130 + }, + { + "epoch": 0.228, + "grad_norm": 0.10039296001195908, + "learning_rate": 4.7720000000000004e-05, + "loss": 0.0405, + "step": 1140 + }, + { + "epoch": 0.23, + "grad_norm": 1.098934531211853, + "learning_rate": 4.77e-05, + "loss": 0.0287, + "step": 1150 + }, + { + "epoch": 0.232, + "grad_norm": 0.3654647767543793, + "learning_rate": 4.7680000000000004e-05, + "loss": 0.0643, + "step": 1160 + }, + { + "epoch": 0.234, + "grad_norm": 0.3039003312587738, + "learning_rate": 4.766000000000001e-05, + "loss": 0.0244, + "step": 1170 + }, + { + "epoch": 0.236, + "grad_norm": 3.5462236404418945, + "learning_rate": 4.7640000000000005e-05, + "loss": 0.0468, + "step": 1180 + }, + { + "epoch": 0.238, + "grad_norm": 0.01479172334074974, + "learning_rate": 4.762e-05, + "loss": 0.0078, + "step": 1190 + }, + { + "epoch": 0.24, + "grad_norm": 4.272269248962402, + "learning_rate": 4.76e-05, + "loss": 0.0113, + "step": 1200 + }, + { + "epoch": 0.242, + "grad_norm": 4.091390609741211, + "learning_rate": 4.758e-05, + "loss": 0.0121, + "step": 1210 + }, + { + "epoch": 0.244, + "grad_norm": 0.03659886121749878, + "learning_rate": 4.7560000000000005e-05, + "loss": 0.1266, + "step": 1220 + }, + { + "epoch": 0.246, + "grad_norm": 0.157633975148201, + "learning_rate": 4.754e-05, + "loss": 0.0193, + "step": 1230 + }, + { + "epoch": 0.248, + "grad_norm": 0.0051111564971506596, + "learning_rate": 4.7520000000000006e-05, + "loss": 0.0081, + "step": 1240 + }, + { + "epoch": 0.25, + "grad_norm": 1.567456603050232, + "learning_rate": 4.75e-05, + "loss": 0.0048, + "step": 1250 + }, + { + "epoch": 0.252, + "grad_norm": 3.683023452758789, + "learning_rate": 4.748e-05, + "loss": 0.0195, + "step": 1260 + }, + { + "epoch": 0.254, + "grad_norm": 0.06732743978500366, + "learning_rate": 4.746e-05, + "loss": 0.0374, + "step": 1270 + }, + { + "epoch": 0.256, + "grad_norm": 0.10905110090970993, + "learning_rate": 4.744e-05, + "loss": 0.0392, + "step": 1280 + }, + { + "epoch": 0.258, + "grad_norm": 0.47610998153686523, + "learning_rate": 4.742e-05, + "loss": 0.0029, + "step": 1290 + }, + { + "epoch": 0.26, + "grad_norm": 0.13898958265781403, + "learning_rate": 4.74e-05, + "loss": 0.0551, + "step": 1300 + }, + { + "epoch": 0.262, + "grad_norm": 0.012497484683990479, + "learning_rate": 4.7380000000000004e-05, + "loss": 0.1259, + "step": 1310 + }, + { + "epoch": 0.264, + "grad_norm": 0.013291290029883385, + "learning_rate": 4.736000000000001e-05, + "loss": 0.0031, + "step": 1320 + }, + { + "epoch": 0.266, + "grad_norm": 0.020174721255898476, + "learning_rate": 4.7340000000000004e-05, + "loss": 0.0552, + "step": 1330 + }, + { + "epoch": 0.268, + "grad_norm": 0.36099356412887573, + "learning_rate": 4.732e-05, + "loss": 0.0522, + "step": 1340 + }, + { + 
"epoch": 0.27, + "grad_norm": 0.03428328409790993, + "learning_rate": 4.73e-05, + "loss": 0.0121, + "step": 1350 + }, + { + "epoch": 0.272, + "grad_norm": 0.10378572344779968, + "learning_rate": 4.728e-05, + "loss": 0.1406, + "step": 1360 + }, + { + "epoch": 0.274, + "grad_norm": 0.06635264307260513, + "learning_rate": 4.7260000000000005e-05, + "loss": 0.0073, + "step": 1370 + }, + { + "epoch": 0.276, + "grad_norm": 0.03210114687681198, + "learning_rate": 4.724e-05, + "loss": 0.0383, + "step": 1380 + }, + { + "epoch": 0.278, + "grad_norm": 0.016254939138889313, + "learning_rate": 4.7220000000000005e-05, + "loss": 0.0192, + "step": 1390 + }, + { + "epoch": 0.28, + "grad_norm": 0.9825054407119751, + "learning_rate": 4.72e-05, + "loss": 0.0063, + "step": 1400 + }, + { + "epoch": 0.282, + "grad_norm": 0.39390048384666443, + "learning_rate": 4.718e-05, + "loss": 0.0018, + "step": 1410 + }, + { + "epoch": 0.284, + "grad_norm": 0.0035170663613826036, + "learning_rate": 4.716e-05, + "loss": 0.0057, + "step": 1420 + }, + { + "epoch": 0.286, + "grad_norm": 0.38211411237716675, + "learning_rate": 4.714e-05, + "loss": 0.0115, + "step": 1430 + }, + { + "epoch": 0.288, + "grad_norm": 0.008729588240385056, + "learning_rate": 4.712e-05, + "loss": 0.0408, + "step": 1440 + }, + { + "epoch": 0.29, + "grad_norm": 0.8136197328567505, + "learning_rate": 4.71e-05, + "loss": 0.0021, + "step": 1450 + }, + { + "epoch": 0.292, + "grad_norm": 2.9295754432678223, + "learning_rate": 4.708e-05, + "loss": 0.1572, + "step": 1460 + }, + { + "epoch": 0.294, + "grad_norm": 0.9183974862098694, + "learning_rate": 4.706000000000001e-05, + "loss": 0.088, + "step": 1470 + }, + { + "epoch": 0.296, + "grad_norm": 0.022023877128958702, + "learning_rate": 4.7040000000000004e-05, + "loss": 0.0209, + "step": 1480 + }, + { + "epoch": 0.298, + "grad_norm": 3.611262559890747, + "learning_rate": 4.702e-05, + "loss": 0.0424, + "step": 1490 + }, + { + "epoch": 0.3, + "grad_norm": 3.3381121158599854, + "learning_rate": 4.7e-05, + "loss": 0.0403, + "step": 1500 + }, + { + "epoch": 0.302, + "grad_norm": 0.0031520898919552565, + "learning_rate": 4.698e-05, + "loss": 0.0016, + "step": 1510 + }, + { + "epoch": 0.304, + "grad_norm": 0.007907604798674583, + "learning_rate": 4.6960000000000004e-05, + "loss": 0.0437, + "step": 1520 + }, + { + "epoch": 0.306, + "grad_norm": 0.18256665766239166, + "learning_rate": 4.694e-05, + "loss": 0.0468, + "step": 1530 + }, + { + "epoch": 0.308, + "grad_norm": 0.08469197154045105, + "learning_rate": 4.6920000000000005e-05, + "loss": 0.094, + "step": 1540 + }, + { + "epoch": 0.31, + "grad_norm": 0.023916004225611687, + "learning_rate": 4.69e-05, + "loss": 0.0338, + "step": 1550 + }, + { + "epoch": 0.312, + "grad_norm": 0.047887589782476425, + "learning_rate": 4.688e-05, + "loss": 0.0154, + "step": 1560 + }, + { + "epoch": 0.314, + "grad_norm": 11.07988452911377, + "learning_rate": 4.686e-05, + "loss": 0.0529, + "step": 1570 + }, + { + "epoch": 0.316, + "grad_norm": 0.3059365749359131, + "learning_rate": 4.684e-05, + "loss": 0.0585, + "step": 1580 + }, + { + "epoch": 0.318, + "grad_norm": 0.30327826738357544, + "learning_rate": 4.682e-05, + "loss": 0.0207, + "step": 1590 + }, + { + "epoch": 0.32, + "grad_norm": 0.0919349268078804, + "learning_rate": 4.6800000000000006e-05, + "loss": 0.0873, + "step": 1600 + }, + { + "epoch": 0.322, + "grad_norm": 1.0741651058197021, + "learning_rate": 4.678e-05, + "loss": 0.0699, + "step": 1610 + }, + { + "epoch": 0.324, + "grad_norm": 0.00857605691999197, + "learning_rate": 
4.6760000000000006e-05, + "loss": 0.0869, + "step": 1620 + }, + { + "epoch": 0.326, + "grad_norm": 0.035690005868673325, + "learning_rate": 4.674e-05, + "loss": 0.006, + "step": 1630 + }, + { + "epoch": 0.328, + "grad_norm": 0.08569003641605377, + "learning_rate": 4.672e-05, + "loss": 0.0024, + "step": 1640 + }, + { + "epoch": 0.33, + "grad_norm": 0.11262888461351395, + "learning_rate": 4.6700000000000003e-05, + "loss": 0.0475, + "step": 1650 + }, + { + "epoch": 0.332, + "grad_norm": 0.0029759930912405252, + "learning_rate": 4.668e-05, + "loss": 0.1158, + "step": 1660 + }, + { + "epoch": 0.334, + "grad_norm": 0.14019815623760223, + "learning_rate": 4.6660000000000004e-05, + "loss": 0.0015, + "step": 1670 + }, + { + "epoch": 0.336, + "grad_norm": 0.06434296816587448, + "learning_rate": 4.664e-05, + "loss": 0.072, + "step": 1680 + }, + { + "epoch": 0.338, + "grad_norm": 0.02371792495250702, + "learning_rate": 4.6620000000000004e-05, + "loss": 0.0292, + "step": 1690 + }, + { + "epoch": 0.34, + "grad_norm": 6.26638126373291, + "learning_rate": 4.660000000000001e-05, + "loss": 0.0654, + "step": 1700 + }, + { + "epoch": 0.342, + "grad_norm": 0.013329977169632912, + "learning_rate": 4.6580000000000005e-05, + "loss": 0.0507, + "step": 1710 + }, + { + "epoch": 0.344, + "grad_norm": 0.020742950960993767, + "learning_rate": 4.656e-05, + "loss": 0.0008, + "step": 1720 + }, + { + "epoch": 0.346, + "grad_norm": 0.10419709235429764, + "learning_rate": 4.654e-05, + "loss": 0.0156, + "step": 1730 + }, + { + "epoch": 0.348, + "grad_norm": 0.001128773088566959, + "learning_rate": 4.652e-05, + "loss": 0.0046, + "step": 1740 + }, + { + "epoch": 0.35, + "grad_norm": 0.02164996601641178, + "learning_rate": 4.6500000000000005e-05, + "loss": 0.0009, + "step": 1750 + }, + { + "epoch": 0.352, + "grad_norm": 0.002971704350784421, + "learning_rate": 4.648e-05, + "loss": 0.0299, + "step": 1760 + }, + { + "epoch": 0.354, + "grad_norm": 0.06722798943519592, + "learning_rate": 4.6460000000000006e-05, + "loss": 0.0352, + "step": 1770 + }, + { + "epoch": 0.356, + "grad_norm": 0.006361286621540785, + "learning_rate": 4.644e-05, + "loss": 0.0042, + "step": 1780 + }, + { + "epoch": 0.358, + "grad_norm": 5.5403571128845215, + "learning_rate": 4.642e-05, + "loss": 0.1371, + "step": 1790 + }, + { + "epoch": 0.36, + "grad_norm": 0.2011372298002243, + "learning_rate": 4.64e-05, + "loss": 0.0963, + "step": 1800 + }, + { + "epoch": 0.362, + "grad_norm": 0.008815037086606026, + "learning_rate": 4.638e-05, + "loss": 0.0445, + "step": 1810 + }, + { + "epoch": 0.364, + "grad_norm": 0.010142517276108265, + "learning_rate": 4.636e-05, + "loss": 0.0581, + "step": 1820 + }, + { + "epoch": 0.366, + "grad_norm": 0.002696698298677802, + "learning_rate": 4.634e-05, + "loss": 0.0435, + "step": 1830 + }, + { + "epoch": 0.368, + "grad_norm": 0.05711021274328232, + "learning_rate": 4.6320000000000004e-05, + "loss": 0.0221, + "step": 1840 + }, + { + "epoch": 0.37, + "grad_norm": 0.1295560747385025, + "learning_rate": 4.630000000000001e-05, + "loss": 0.0039, + "step": 1850 + }, + { + "epoch": 0.372, + "grad_norm": 8.641880989074707, + "learning_rate": 4.6280000000000004e-05, + "loss": 0.05, + "step": 1860 + }, + { + "epoch": 0.374, + "grad_norm": 0.008340752683579922, + "learning_rate": 4.626e-05, + "loss": 0.1107, + "step": 1870 + }, + { + "epoch": 0.376, + "grad_norm": 0.041587088257074356, + "learning_rate": 4.624e-05, + "loss": 0.0902, + "step": 1880 + }, + { + "epoch": 0.378, + "grad_norm": 1.1638970375061035, + "learning_rate": 4.622e-05, + 
"loss": 0.0297, + "step": 1890 + }, + { + "epoch": 0.38, + "grad_norm": 0.016888264566659927, + "learning_rate": 4.6200000000000005e-05, + "loss": 0.0078, + "step": 1900 + }, + { + "epoch": 0.382, + "grad_norm": 0.07481393963098526, + "learning_rate": 4.618e-05, + "loss": 0.0348, + "step": 1910 + }, + { + "epoch": 0.384, + "grad_norm": 4.364596843719482, + "learning_rate": 4.6160000000000005e-05, + "loss": 0.1122, + "step": 1920 + }, + { + "epoch": 0.386, + "grad_norm": 0.0195784792304039, + "learning_rate": 4.614e-05, + "loss": 0.0434, + "step": 1930 + }, + { + "epoch": 0.388, + "grad_norm": 6.045379161834717, + "learning_rate": 4.612e-05, + "loss": 0.0194, + "step": 1940 + }, + { + "epoch": 0.39, + "grad_norm": 0.0069319577887654305, + "learning_rate": 4.61e-05, + "loss": 0.0809, + "step": 1950 + }, + { + "epoch": 0.392, + "grad_norm": 3.495983839035034, + "learning_rate": 4.608e-05, + "loss": 0.0738, + "step": 1960 + }, + { + "epoch": 0.394, + "grad_norm": 0.19603276252746582, + "learning_rate": 4.606e-05, + "loss": 0.0115, + "step": 1970 + }, + { + "epoch": 0.396, + "grad_norm": 3.1189138889312744, + "learning_rate": 4.604e-05, + "loss": 0.0076, + "step": 1980 + }, + { + "epoch": 0.398, + "grad_norm": 0.010036235675215721, + "learning_rate": 4.602e-05, + "loss": 0.0124, + "step": 1990 + }, + { + "epoch": 0.4, + "grad_norm": 0.02806951105594635, + "learning_rate": 4.600000000000001e-05, + "loss": 0.0127, + "step": 2000 + }, + { + "epoch": 0.402, + "grad_norm": 0.19674068689346313, + "learning_rate": 4.5980000000000004e-05, + "loss": 0.0184, + "step": 2010 + }, + { + "epoch": 0.404, + "grad_norm": 0.021522896364331245, + "learning_rate": 4.596e-05, + "loss": 0.0021, + "step": 2020 + }, + { + "epoch": 0.406, + "grad_norm": 0.044582124799489975, + "learning_rate": 4.594e-05, + "loss": 0.0632, + "step": 2030 + }, + { + "epoch": 0.408, + "grad_norm": 0.004063369706273079, + "learning_rate": 4.592e-05, + "loss": 0.001, + "step": 2040 + }, + { + "epoch": 0.41, + "grad_norm": 0.032624807208776474, + "learning_rate": 4.5900000000000004e-05, + "loss": 0.0493, + "step": 2050 + }, + { + "epoch": 0.412, + "grad_norm": 0.018941815942525864, + "learning_rate": 4.588e-05, + "loss": 0.0024, + "step": 2060 + }, + { + "epoch": 0.414, + "grad_norm": 0.4768604636192322, + "learning_rate": 4.5860000000000005e-05, + "loss": 0.0317, + "step": 2070 + }, + { + "epoch": 0.416, + "grad_norm": 0.007809026166796684, + "learning_rate": 4.584e-05, + "loss": 0.0849, + "step": 2080 + }, + { + "epoch": 0.418, + "grad_norm": 0.09230257570743561, + "learning_rate": 4.5820000000000005e-05, + "loss": 0.0032, + "step": 2090 + }, + { + "epoch": 0.42, + "grad_norm": 0.0211187656968832, + "learning_rate": 4.58e-05, + "loss": 0.0148, + "step": 2100 + }, + { + "epoch": 0.422, + "grad_norm": 0.027965866029262543, + "learning_rate": 4.578e-05, + "loss": 0.0776, + "step": 2110 + }, + { + "epoch": 0.424, + "grad_norm": 0.006458558142185211, + "learning_rate": 4.576e-05, + "loss": 0.0127, + "step": 2120 + }, + { + "epoch": 0.426, + "grad_norm": 2.797787666320801, + "learning_rate": 4.574e-05, + "loss": 0.0425, + "step": 2130 + }, + { + "epoch": 0.428, + "grad_norm": 0.008578737266361713, + "learning_rate": 4.572e-05, + "loss": 0.0697, + "step": 2140 + }, + { + "epoch": 0.43, + "grad_norm": 0.8794230818748474, + "learning_rate": 4.5700000000000006e-05, + "loss": 0.049, + "step": 2150 + }, + { + "epoch": 0.432, + "grad_norm": 0.09468420594930649, + "learning_rate": 4.568e-05, + "loss": 0.0515, + "step": 2160 + }, + { + "epoch": 0.434, + 
"grad_norm": 0.010913840495049953, + "learning_rate": 4.566e-05, + "loss": 0.0129, + "step": 2170 + }, + { + "epoch": 0.436, + "grad_norm": 0.024601969867944717, + "learning_rate": 4.564e-05, + "loss": 0.007, + "step": 2180 + }, + { + "epoch": 0.438, + "grad_norm": 0.08446158468723297, + "learning_rate": 4.562e-05, + "loss": 0.0543, + "step": 2190 + }, + { + "epoch": 0.44, + "grad_norm": 0.03246144950389862, + "learning_rate": 4.5600000000000004e-05, + "loss": 0.0081, + "step": 2200 + }, + { + "epoch": 0.442, + "grad_norm": 5.424055576324463, + "learning_rate": 4.558e-05, + "loss": 0.033, + "step": 2210 + }, + { + "epoch": 0.444, + "grad_norm": 0.060880791395902634, + "learning_rate": 4.5560000000000004e-05, + "loss": 0.0291, + "step": 2220 + }, + { + "epoch": 0.446, + "grad_norm": 0.01959436945617199, + "learning_rate": 4.554000000000001e-05, + "loss": 0.002, + "step": 2230 + }, + { + "epoch": 0.448, + "grad_norm": 0.02229149639606476, + "learning_rate": 4.5520000000000005e-05, + "loss": 0.0222, + "step": 2240 + }, + { + "epoch": 0.45, + "grad_norm": 0.8051292300224304, + "learning_rate": 4.55e-05, + "loss": 0.0921, + "step": 2250 + }, + { + "epoch": 0.452, + "grad_norm": 4.567664623260498, + "learning_rate": 4.548e-05, + "loss": 0.0898, + "step": 2260 + }, + { + "epoch": 0.454, + "grad_norm": 1.3188562393188477, + "learning_rate": 4.546e-05, + "loss": 0.059, + "step": 2270 + }, + { + "epoch": 0.456, + "grad_norm": 0.07389594614505768, + "learning_rate": 4.5440000000000005e-05, + "loss": 0.0776, + "step": 2280 + }, + { + "epoch": 0.458, + "grad_norm": 0.07509294152259827, + "learning_rate": 4.542e-05, + "loss": 0.0774, + "step": 2290 + }, + { + "epoch": 0.46, + "grad_norm": 0.020364753901958466, + "learning_rate": 4.5400000000000006e-05, + "loss": 0.0062, + "step": 2300 + }, + { + "epoch": 0.462, + "grad_norm": 0.08534001559019089, + "learning_rate": 4.538e-05, + "loss": 0.0177, + "step": 2310 + }, + { + "epoch": 0.464, + "grad_norm": 0.019280171021819115, + "learning_rate": 4.536e-05, + "loss": 0.002, + "step": 2320 + }, + { + "epoch": 0.466, + "grad_norm": 0.1653839647769928, + "learning_rate": 4.534e-05, + "loss": 0.0472, + "step": 2330 + }, + { + "epoch": 0.468, + "grad_norm": 0.01105203852057457, + "learning_rate": 4.532e-05, + "loss": 0.0281, + "step": 2340 + }, + { + "epoch": 0.47, + "grad_norm": 0.6273080110549927, + "learning_rate": 4.53e-05, + "loss": 0.0851, + "step": 2350 + }, + { + "epoch": 0.472, + "grad_norm": 0.04479006305336952, + "learning_rate": 4.528e-05, + "loss": 0.0011, + "step": 2360 + }, + { + "epoch": 0.474, + "grad_norm": 0.013826013542711735, + "learning_rate": 4.5260000000000004e-05, + "loss": 0.0055, + "step": 2370 + }, + { + "epoch": 0.476, + "grad_norm": 0.008165967650711536, + "learning_rate": 4.524000000000001e-05, + "loss": 0.0009, + "step": 2380 + }, + { + "epoch": 0.478, + "grad_norm": 0.09005433320999146, + "learning_rate": 4.5220000000000004e-05, + "loss": 0.0367, + "step": 2390 + }, + { + "epoch": 0.48, + "grad_norm": 0.059469930827617645, + "learning_rate": 4.52e-05, + "loss": 0.0509, + "step": 2400 + }, + { + "epoch": 0.482, + "grad_norm": 0.0066351089626550674, + "learning_rate": 4.518e-05, + "loss": 0.071, + "step": 2410 + }, + { + "epoch": 0.484, + "grad_norm": 6.272415637969971, + "learning_rate": 4.516e-05, + "loss": 0.0809, + "step": 2420 + }, + { + "epoch": 0.486, + "grad_norm": 0.011232190765440464, + "learning_rate": 4.5140000000000005e-05, + "loss": 0.049, + "step": 2430 + }, + { + "epoch": 0.488, + "grad_norm": 0.8765757083892822, + 
"learning_rate": 4.512e-05, + "loss": 0.0486, + "step": 2440 + }, + { + "epoch": 0.49, + "grad_norm": 0.014707114547491074, + "learning_rate": 4.5100000000000005e-05, + "loss": 0.0346, + "step": 2450 + }, + { + "epoch": 0.492, + "grad_norm": 0.02815588191151619, + "learning_rate": 4.508e-05, + "loss": 0.0285, + "step": 2460 + }, + { + "epoch": 0.494, + "grad_norm": 0.013969714753329754, + "learning_rate": 4.506e-05, + "loss": 0.0052, + "step": 2470 + }, + { + "epoch": 0.496, + "grad_norm": 0.030105378478765488, + "learning_rate": 4.504e-05, + "loss": 0.0329, + "step": 2480 + }, + { + "epoch": 0.498, + "grad_norm": 0.028038593009114265, + "learning_rate": 4.502e-05, + "loss": 0.0279, + "step": 2490 + }, + { + "epoch": 0.5, + "grad_norm": 0.09044764190912247, + "learning_rate": 4.5e-05, + "loss": 0.0314, + "step": 2500 + }, + { + "epoch": 0.502, + "grad_norm": 0.02895580418407917, + "learning_rate": 4.498e-05, + "loss": 0.0823, + "step": 2510 + }, + { + "epoch": 0.504, + "grad_norm": 0.004999326542019844, + "learning_rate": 4.496e-05, + "loss": 0.0015, + "step": 2520 + }, + { + "epoch": 0.506, + "grad_norm": 0.10353109240531921, + "learning_rate": 4.494000000000001e-05, + "loss": 0.0315, + "step": 2530 + }, + { + "epoch": 0.508, + "grad_norm": 1.6411314010620117, + "learning_rate": 4.4920000000000004e-05, + "loss": 0.0109, + "step": 2540 + }, + { + "epoch": 0.51, + "grad_norm": 0.11047064512968063, + "learning_rate": 4.49e-05, + "loss": 0.0227, + "step": 2550 + }, + { + "epoch": 0.512, + "grad_norm": 0.027552004903554916, + "learning_rate": 4.488e-05, + "loss": 0.0481, + "step": 2560 + }, + { + "epoch": 0.514, + "grad_norm": 0.2809949815273285, + "learning_rate": 4.486e-05, + "loss": 0.0016, + "step": 2570 + }, + { + "epoch": 0.516, + "grad_norm": 0.012851985171437263, + "learning_rate": 4.4840000000000004e-05, + "loss": 0.0014, + "step": 2580 + }, + { + "epoch": 0.518, + "grad_norm": 5.742981910705566, + "learning_rate": 4.482e-05, + "loss": 0.0187, + "step": 2590 + }, + { + "epoch": 0.52, + "grad_norm": 0.013996957801282406, + "learning_rate": 4.4800000000000005e-05, + "loss": 0.0005, + "step": 2600 + }, + { + "epoch": 0.522, + "grad_norm": 0.1463635265827179, + "learning_rate": 4.478e-05, + "loss": 0.0517, + "step": 2610 + }, + { + "epoch": 0.524, + "grad_norm": 0.06374786049127579, + "learning_rate": 4.4760000000000005e-05, + "loss": 0.0455, + "step": 2620 + }, + { + "epoch": 0.526, + "grad_norm": 1.9763230085372925, + "learning_rate": 4.474e-05, + "loss": 0.0811, + "step": 2630 + }, + { + "epoch": 0.528, + "grad_norm": 0.0060223303735256195, + "learning_rate": 4.472e-05, + "loss": 0.0037, + "step": 2640 + }, + { + "epoch": 0.53, + "grad_norm": 3.1282496452331543, + "learning_rate": 4.47e-05, + "loss": 0.028, + "step": 2650 + }, + { + "epoch": 0.532, + "grad_norm": 0.04501995071768761, + "learning_rate": 4.468e-05, + "loss": 0.0634, + "step": 2660 + }, + { + "epoch": 0.534, + "grad_norm": 0.9025501012802124, + "learning_rate": 4.466e-05, + "loss": 0.0253, + "step": 2670 + }, + { + "epoch": 0.536, + "grad_norm": 0.013647008687257767, + "learning_rate": 4.4640000000000006e-05, + "loss": 0.0048, + "step": 2680 + }, + { + "epoch": 0.538, + "grad_norm": 0.016055762767791748, + "learning_rate": 4.462e-05, + "loss": 0.0005, + "step": 2690 + }, + { + "epoch": 0.54, + "grad_norm": 0.009782964363694191, + "learning_rate": 4.46e-05, + "loss": 0.0082, + "step": 2700 + }, + { + "epoch": 0.542, + "grad_norm": 0.008022104389965534, + "learning_rate": 4.458e-05, + "loss": 0.0188, + "step": 2710 + }, + { 
+ "epoch": 0.544, + "grad_norm": 0.0033532867673784494, + "learning_rate": 4.456e-05, + "loss": 0.0039, + "step": 2720 + }, + { + "epoch": 0.546, + "grad_norm": 0.08850495517253876, + "learning_rate": 4.4540000000000004e-05, + "loss": 0.0042, + "step": 2730 + }, + { + "epoch": 0.548, + "grad_norm": 3.59587025642395, + "learning_rate": 4.452e-05, + "loss": 0.0495, + "step": 2740 + }, + { + "epoch": 0.55, + "grad_norm": 5.187063217163086, + "learning_rate": 4.4500000000000004e-05, + "loss": 0.0632, + "step": 2750 + }, + { + "epoch": 0.552, + "grad_norm": 0.005760591477155685, + "learning_rate": 4.448e-05, + "loss": 0.0732, + "step": 2760 + }, + { + "epoch": 0.554, + "grad_norm": 0.11296861618757248, + "learning_rate": 4.4460000000000005e-05, + "loss": 0.0331, + "step": 2770 + }, + { + "epoch": 0.556, + "grad_norm": 0.02798079513013363, + "learning_rate": 4.444e-05, + "loss": 0.0484, + "step": 2780 + }, + { + "epoch": 0.558, + "grad_norm": 0.11013288795948029, + "learning_rate": 4.442e-05, + "loss": 0.0033, + "step": 2790 + }, + { + "epoch": 0.56, + "grad_norm": 4.912937164306641, + "learning_rate": 4.44e-05, + "loss": 0.0248, + "step": 2800 + }, + { + "epoch": 0.562, + "grad_norm": 8.702404022216797, + "learning_rate": 4.438e-05, + "loss": 0.0565, + "step": 2810 + }, + { + "epoch": 0.564, + "grad_norm": 0.01780632883310318, + "learning_rate": 4.436e-05, + "loss": 0.0085, + "step": 2820 + }, + { + "epoch": 0.566, + "grad_norm": 0.008804120123386383, + "learning_rate": 4.4340000000000006e-05, + "loss": 0.0498, + "step": 2830 + }, + { + "epoch": 0.568, + "grad_norm": 0.02988666482269764, + "learning_rate": 4.432e-05, + "loss": 0.0482, + "step": 2840 + }, + { + "epoch": 0.57, + "grad_norm": 0.0162635650485754, + "learning_rate": 4.43e-05, + "loss": 0.0564, + "step": 2850 + }, + { + "epoch": 0.572, + "grad_norm": 0.15490345656871796, + "learning_rate": 4.428e-05, + "loss": 0.0328, + "step": 2860 + }, + { + "epoch": 0.574, + "grad_norm": 0.017897745594382286, + "learning_rate": 4.426e-05, + "loss": 0.0025, + "step": 2870 + }, + { + "epoch": 0.576, + "grad_norm": 0.2046775221824646, + "learning_rate": 4.424e-05, + "loss": 0.0018, + "step": 2880 + }, + { + "epoch": 0.578, + "grad_norm": 0.097975954413414, + "learning_rate": 4.422e-05, + "loss": 0.0231, + "step": 2890 + }, + { + "epoch": 0.58, + "grad_norm": 0.050555840134620667, + "learning_rate": 4.4200000000000004e-05, + "loss": 0.0005, + "step": 2900 + }, + { + "epoch": 0.582, + "grad_norm": 0.006744217127561569, + "learning_rate": 4.418000000000001e-05, + "loss": 0.0333, + "step": 2910 + }, + { + "epoch": 0.584, + "grad_norm": 0.11287890374660492, + "learning_rate": 4.4160000000000004e-05, + "loss": 0.0361, + "step": 2920 + }, + { + "epoch": 0.586, + "grad_norm": 0.03264043852686882, + "learning_rate": 4.414e-05, + "loss": 0.0316, + "step": 2930 + }, + { + "epoch": 0.588, + "grad_norm": 0.004322608467191458, + "learning_rate": 4.412e-05, + "loss": 0.0549, + "step": 2940 + }, + { + "epoch": 0.59, + "grad_norm": 0.09659099578857422, + "learning_rate": 4.41e-05, + "loss": 0.094, + "step": 2950 + }, + { + "epoch": 0.592, + "grad_norm": 0.06353981792926788, + "learning_rate": 4.4080000000000005e-05, + "loss": 0.0231, + "step": 2960 + }, + { + "epoch": 0.594, + "grad_norm": 0.0059261200949549675, + "learning_rate": 4.406e-05, + "loss": 0.0008, + "step": 2970 + }, + { + "epoch": 0.596, + "grad_norm": 0.09767893701791763, + "learning_rate": 4.4040000000000005e-05, + "loss": 0.0234, + "step": 2980 + }, + { + "epoch": 0.598, + "grad_norm": 
0.005928624887019396, + "learning_rate": 4.402e-05, + "loss": 0.0241, + "step": 2990 + }, + { + "epoch": 0.6, + "grad_norm": 0.0034908633679151535, + "learning_rate": 4.4000000000000006e-05, + "loss": 0.004, + "step": 3000 + }, + { + "epoch": 0.602, + "grad_norm": 0.18380242586135864, + "learning_rate": 4.398e-05, + "loss": 0.0699, + "step": 3010 + }, + { + "epoch": 0.604, + "grad_norm": 0.013765583746135235, + "learning_rate": 4.396e-05, + "loss": 0.0647, + "step": 3020 + }, + { + "epoch": 0.606, + "grad_norm": 0.2933019995689392, + "learning_rate": 4.394e-05, + "loss": 0.1148, + "step": 3030 + }, + { + "epoch": 0.608, + "grad_norm": 0.025059092789888382, + "learning_rate": 4.392e-05, + "loss": 0.0013, + "step": 3040 + }, + { + "epoch": 0.61, + "grad_norm": 0.025903893634676933, + "learning_rate": 4.39e-05, + "loss": 0.0101, + "step": 3050 + }, + { + "epoch": 0.612, + "grad_norm": 0.02233230695128441, + "learning_rate": 4.388000000000001e-05, + "loss": 0.0929, + "step": 3060 + }, + { + "epoch": 0.614, + "grad_norm": 0.041354354470968246, + "learning_rate": 4.3860000000000004e-05, + "loss": 0.0291, + "step": 3070 + }, + { + "epoch": 0.616, + "grad_norm": 0.004411335568875074, + "learning_rate": 4.384e-05, + "loss": 0.0314, + "step": 3080 + }, + { + "epoch": 0.618, + "grad_norm": 0.05570805072784424, + "learning_rate": 4.382e-05, + "loss": 0.001, + "step": 3090 + }, + { + "epoch": 0.62, + "grad_norm": 2.777430772781372, + "learning_rate": 4.38e-05, + "loss": 0.1514, + "step": 3100 + }, + { + "epoch": 0.622, + "grad_norm": 4.803562164306641, + "learning_rate": 4.3780000000000004e-05, + "loss": 0.0401, + "step": 3110 + }, + { + "epoch": 0.624, + "grad_norm": 0.07038727402687073, + "learning_rate": 4.376e-05, + "loss": 0.0186, + "step": 3120 + }, + { + "epoch": 0.626, + "grad_norm": 0.024878088384866714, + "learning_rate": 4.3740000000000005e-05, + "loss": 0.0644, + "step": 3130 + }, + { + "epoch": 0.628, + "grad_norm": 0.010944769717752934, + "learning_rate": 4.372e-05, + "loss": 0.0191, + "step": 3140 + }, + { + "epoch": 0.63, + "grad_norm": 0.01699189841747284, + "learning_rate": 4.3700000000000005e-05, + "loss": 0.0154, + "step": 3150 + }, + { + "epoch": 0.632, + "grad_norm": 4.613604545593262, + "learning_rate": 4.368e-05, + "loss": 0.0147, + "step": 3160 + }, + { + "epoch": 0.634, + "grad_norm": 0.010693288408219814, + "learning_rate": 4.366e-05, + "loss": 0.0008, + "step": 3170 + }, + { + "epoch": 0.636, + "grad_norm": 0.005737672559916973, + "learning_rate": 4.364e-05, + "loss": 0.002, + "step": 3180 + }, + { + "epoch": 0.638, + "grad_norm": 0.004944519139826298, + "learning_rate": 4.362e-05, + "loss": 0.0013, + "step": 3190 + }, + { + "epoch": 0.64, + "grad_norm": 0.0188067015260458, + "learning_rate": 4.36e-05, + "loss": 0.0012, + "step": 3200 + }, + { + "epoch": 0.642, + "grad_norm": 0.0032054812181741, + "learning_rate": 4.3580000000000006e-05, + "loss": 0.0145, + "step": 3210 + }, + { + "epoch": 0.644, + "grad_norm": 0.13175809383392334, + "learning_rate": 4.356e-05, + "loss": 0.0392, + "step": 3220 + }, + { + "epoch": 0.646, + "grad_norm": 4.678634166717529, + "learning_rate": 4.354e-05, + "loss": 0.0444, + "step": 3230 + }, + { + "epoch": 0.648, + "grad_norm": 9.381738662719727, + "learning_rate": 4.352e-05, + "loss": 0.0287, + "step": 3240 + }, + { + "epoch": 0.65, + "grad_norm": 0.00795825943350792, + "learning_rate": 4.35e-05, + "loss": 0.0803, + "step": 3250 + }, + { + "epoch": 0.652, + "grad_norm": 0.04332546144723892, + "learning_rate": 4.3480000000000004e-05, + "loss": 
0.0154, + "step": 3260 + }, + { + "epoch": 0.654, + "grad_norm": 0.2564730644226074, + "learning_rate": 4.346e-05, + "loss": 0.0476, + "step": 3270 + }, + { + "epoch": 0.656, + "grad_norm": 3.7965617179870605, + "learning_rate": 4.3440000000000004e-05, + "loss": 0.1515, + "step": 3280 + }, + { + "epoch": 0.658, + "grad_norm": 0.0158467348664999, + "learning_rate": 4.342e-05, + "loss": 0.0529, + "step": 3290 + }, + { + "epoch": 0.66, + "grad_norm": 0.6378382444381714, + "learning_rate": 4.3400000000000005e-05, + "loss": 0.0225, + "step": 3300 + }, + { + "epoch": 0.662, + "grad_norm": 3.4232382774353027, + "learning_rate": 4.338e-05, + "loss": 0.0941, + "step": 3310 + }, + { + "epoch": 0.664, + "grad_norm": 0.0431983545422554, + "learning_rate": 4.336e-05, + "loss": 0.0196, + "step": 3320 + }, + { + "epoch": 0.666, + "grad_norm": 0.15850763022899628, + "learning_rate": 4.334e-05, + "loss": 0.0229, + "step": 3330 + }, + { + "epoch": 0.668, + "grad_norm": 0.6902371644973755, + "learning_rate": 4.332e-05, + "loss": 0.0387, + "step": 3340 + }, + { + "epoch": 0.67, + "grad_norm": 0.028398212045431137, + "learning_rate": 4.33e-05, + "loss": 0.0053, + "step": 3350 + }, + { + "epoch": 0.672, + "grad_norm": 0.048552609980106354, + "learning_rate": 4.3280000000000006e-05, + "loss": 0.0017, + "step": 3360 + }, + { + "epoch": 0.674, + "grad_norm": 0.011205868795514107, + "learning_rate": 4.326e-05, + "loss": 0.002, + "step": 3370 + }, + { + "epoch": 0.676, + "grad_norm": 6.537700653076172, + "learning_rate": 4.324e-05, + "loss": 0.0238, + "step": 3380 + }, + { + "epoch": 0.678, + "grad_norm": 0.03194591403007507, + "learning_rate": 4.3219999999999996e-05, + "loss": 0.0004, + "step": 3390 + }, + { + "epoch": 0.68, + "grad_norm": 0.04188118129968643, + "learning_rate": 4.32e-05, + "loss": 0.0028, + "step": 3400 + }, + { + "epoch": 0.682, + "grad_norm": 1.9461021423339844, + "learning_rate": 4.318e-05, + "loss": 0.0039, + "step": 3410 + }, + { + "epoch": 0.684, + "grad_norm": 0.028302855789661407, + "learning_rate": 4.316e-05, + "loss": 0.0921, + "step": 3420 + }, + { + "epoch": 0.686, + "grad_norm": 0.06954790651798248, + "learning_rate": 4.3140000000000004e-05, + "loss": 0.0018, + "step": 3430 + }, + { + "epoch": 0.688, + "grad_norm": 0.9867027401924133, + "learning_rate": 4.312000000000001e-05, + "loss": 0.0037, + "step": 3440 + }, + { + "epoch": 0.69, + "grad_norm": 0.005265855696052313, + "learning_rate": 4.3100000000000004e-05, + "loss": 0.026, + "step": 3450 + }, + { + "epoch": 0.692, + "grad_norm": 0.6946307420730591, + "learning_rate": 4.308e-05, + "loss": 0.061, + "step": 3460 + }, + { + "epoch": 0.694, + "grad_norm": 0.06465889513492584, + "learning_rate": 4.306e-05, + "loss": 0.0023, + "step": 3470 + }, + { + "epoch": 0.696, + "grad_norm": 0.7860224843025208, + "learning_rate": 4.304e-05, + "loss": 0.0055, + "step": 3480 + }, + { + "epoch": 0.698, + "grad_norm": 0.04062548652291298, + "learning_rate": 4.3020000000000005e-05, + "loss": 0.0454, + "step": 3490 + }, + { + "epoch": 0.7, + "grad_norm": 0.011811433359980583, + "learning_rate": 4.3e-05, + "loss": 0.0151, + "step": 3500 + }, + { + "epoch": 0.702, + "grad_norm": 2.8018949031829834, + "learning_rate": 4.2980000000000005e-05, + "loss": 0.0043, + "step": 3510 + }, + { + "epoch": 0.704, + "grad_norm": 0.6071522831916809, + "learning_rate": 4.296e-05, + "loss": 0.0011, + "step": 3520 + }, + { + "epoch": 0.706, + "grad_norm": 0.032536111772060394, + "learning_rate": 4.2940000000000006e-05, + "loss": 0.0034, + "step": 3530 + }, + { + "epoch": 
0.708, + "grad_norm": 0.006422733422368765, + "learning_rate": 4.292e-05, + "loss": 0.0019, + "step": 3540 + }, + { + "epoch": 0.71, + "grad_norm": 4.8893818855285645, + "learning_rate": 4.29e-05, + "loss": 0.0786, + "step": 3550 + }, + { + "epoch": 0.712, + "grad_norm": 0.009687143377959728, + "learning_rate": 4.288e-05, + "loss": 0.0002, + "step": 3560 + }, + { + "epoch": 0.714, + "grad_norm": 0.02869969606399536, + "learning_rate": 4.286e-05, + "loss": 0.0818, + "step": 3570 + }, + { + "epoch": 0.716, + "grad_norm": 0.02002692222595215, + "learning_rate": 4.284e-05, + "loss": 0.0002, + "step": 3580 + }, + { + "epoch": 0.718, + "grad_norm": 0.12229043245315552, + "learning_rate": 4.282000000000001e-05, + "loss": 0.0034, + "step": 3590 + }, + { + "epoch": 0.72, + "grad_norm": 0.9271475076675415, + "learning_rate": 4.2800000000000004e-05, + "loss": 0.0228, + "step": 3600 + }, + { + "epoch": 0.722, + "grad_norm": 0.015138584189116955, + "learning_rate": 4.278e-05, + "loss": 0.1131, + "step": 3610 + }, + { + "epoch": 0.724, + "grad_norm": 0.028255993500351906, + "learning_rate": 4.276e-05, + "loss": 0.0303, + "step": 3620 + }, + { + "epoch": 0.726, + "grad_norm": 0.03335114195942879, + "learning_rate": 4.274e-05, + "loss": 0.0049, + "step": 3630 + }, + { + "epoch": 0.728, + "grad_norm": 0.005619376432150602, + "learning_rate": 4.2720000000000004e-05, + "loss": 0.0018, + "step": 3640 + }, + { + "epoch": 0.73, + "grad_norm": 7.768834114074707, + "learning_rate": 4.27e-05, + "loss": 0.0237, + "step": 3650 + }, + { + "epoch": 0.732, + "grad_norm": 0.013384620659053326, + "learning_rate": 4.2680000000000005e-05, + "loss": 0.0041, + "step": 3660 + }, + { + "epoch": 0.734, + "grad_norm": 0.02822156995534897, + "learning_rate": 4.266e-05, + "loss": 0.0932, + "step": 3670 + }, + { + "epoch": 0.736, + "grad_norm": 0.006292347330600023, + "learning_rate": 4.2640000000000005e-05, + "loss": 0.0019, + "step": 3680 + }, + { + "epoch": 0.738, + "grad_norm": 0.04632676765322685, + "learning_rate": 4.262e-05, + "loss": 0.0387, + "step": 3690 + }, + { + "epoch": 0.74, + "grad_norm": 0.011406010948121548, + "learning_rate": 4.26e-05, + "loss": 0.0383, + "step": 3700 + }, + { + "epoch": 0.742, + "grad_norm": 0.015299669466912746, + "learning_rate": 4.258e-05, + "loss": 0.0507, + "step": 3710 + }, + { + "epoch": 0.744, + "grad_norm": 0.002912426833063364, + "learning_rate": 4.256e-05, + "loss": 0.0136, + "step": 3720 + }, + { + "epoch": 0.746, + "grad_norm": 7.015591144561768, + "learning_rate": 4.254e-05, + "loss": 0.0677, + "step": 3730 + }, + { + "epoch": 0.748, + "grad_norm": 0.029005423188209534, + "learning_rate": 4.2520000000000006e-05, + "loss": 0.048, + "step": 3740 + }, + { + "epoch": 0.75, + "grad_norm": 0.006496680434793234, + "learning_rate": 4.25e-05, + "loss": 0.0174, + "step": 3750 + }, + { + "epoch": 0.752, + "grad_norm": 0.005983166862279177, + "learning_rate": 4.248e-05, + "loss": 0.0023, + "step": 3760 + }, + { + "epoch": 0.754, + "grad_norm": 5.252658843994141, + "learning_rate": 4.246e-05, + "loss": 0.0902, + "step": 3770 + }, + { + "epoch": 0.756, + "grad_norm": 0.008069785311818123, + "learning_rate": 4.244e-05, + "loss": 0.004, + "step": 3780 + }, + { + "epoch": 0.758, + "grad_norm": 2.6544532775878906, + "learning_rate": 4.2420000000000004e-05, + "loss": 0.057, + "step": 3790 + }, + { + "epoch": 0.76, + "grad_norm": 0.013155023567378521, + "learning_rate": 4.24e-05, + "loss": 0.0066, + "step": 3800 + }, + { + "epoch": 0.762, + "grad_norm": 4.488767623901367, + "learning_rate": 
4.2380000000000004e-05, + "loss": 0.0157, + "step": 3810 + }, + { + "epoch": 0.764, + "grad_norm": 0.006404740270227194, + "learning_rate": 4.236e-05, + "loss": 0.0496, + "step": 3820 + }, + { + "epoch": 0.766, + "grad_norm": 0.027893809601664543, + "learning_rate": 4.2340000000000005e-05, + "loss": 0.0393, + "step": 3830 + }, + { + "epoch": 0.768, + "grad_norm": 0.2745389938354492, + "learning_rate": 4.232e-05, + "loss": 0.0013, + "step": 3840 + }, + { + "epoch": 0.77, + "grad_norm": 5.9310197830200195, + "learning_rate": 4.23e-05, + "loss": 0.0727, + "step": 3850 + }, + { + "epoch": 0.772, + "grad_norm": 0.014957119710743427, + "learning_rate": 4.228e-05, + "loss": 0.0184, + "step": 3860 + }, + { + "epoch": 0.774, + "grad_norm": 0.047319989651441574, + "learning_rate": 4.226e-05, + "loss": 0.038, + "step": 3870 + }, + { + "epoch": 0.776, + "grad_norm": 0.0042962199077010155, + "learning_rate": 4.224e-05, + "loss": 0.0041, + "step": 3880 + }, + { + "epoch": 0.778, + "grad_norm": 0.0794585719704628, + "learning_rate": 4.2220000000000006e-05, + "loss": 0.0008, + "step": 3890 + }, + { + "epoch": 0.78, + "grad_norm": 0.015754858031868935, + "learning_rate": 4.22e-05, + "loss": 0.0148, + "step": 3900 + }, + { + "epoch": 0.782, + "grad_norm": 0.020532747730612755, + "learning_rate": 4.2180000000000006e-05, + "loss": 0.0345, + "step": 3910 + }, + { + "epoch": 0.784, + "grad_norm": 0.0342375785112381, + "learning_rate": 4.2159999999999996e-05, + "loss": 0.0027, + "step": 3920 + }, + { + "epoch": 0.786, + "grad_norm": 0.4745217561721802, + "learning_rate": 4.214e-05, + "loss": 0.0126, + "step": 3930 + }, + { + "epoch": 0.788, + "grad_norm": 0.0014779713237658143, + "learning_rate": 4.212e-05, + "loss": 0.1061, + "step": 3940 + }, + { + "epoch": 0.79, + "grad_norm": 0.043706584721803665, + "learning_rate": 4.21e-05, + "loss": 0.0195, + "step": 3950 + }, + { + "epoch": 0.792, + "grad_norm": 0.04930654540657997, + "learning_rate": 4.2080000000000004e-05, + "loss": 0.0029, + "step": 3960 + }, + { + "epoch": 0.794, + "grad_norm": 0.025867952033877373, + "learning_rate": 4.206e-05, + "loss": 0.0709, + "step": 3970 + }, + { + "epoch": 0.796, + "grad_norm": 0.6700100898742676, + "learning_rate": 4.2040000000000004e-05, + "loss": 0.0149, + "step": 3980 + }, + { + "epoch": 0.798, + "grad_norm": 0.0404072068631649, + "learning_rate": 4.202e-05, + "loss": 0.0251, + "step": 3990 + }, + { + "epoch": 0.8, + "grad_norm": 0.008657311089336872, + "learning_rate": 4.2e-05, + "loss": 0.0163, + "step": 4000 + }, + { + "epoch": 0.802, + "grad_norm": 0.02054884470999241, + "learning_rate": 4.198e-05, + "loss": 0.0374, + "step": 4010 + }, + { + "epoch": 0.804, + "grad_norm": 0.01718149520456791, + "learning_rate": 4.196e-05, + "loss": 0.0288, + "step": 4020 + }, + { + "epoch": 0.806, + "grad_norm": 0.05612902343273163, + "learning_rate": 4.194e-05, + "loss": 0.0184, + "step": 4030 + }, + { + "epoch": 0.808, + "grad_norm": 0.009326531551778316, + "learning_rate": 4.1920000000000005e-05, + "loss": 0.0023, + "step": 4040 + }, + { + "epoch": 0.81, + "grad_norm": 0.09644528478384018, + "learning_rate": 4.19e-05, + "loss": 0.0184, + "step": 4050 + }, + { + "epoch": 0.812, + "grad_norm": 0.011878632009029388, + "learning_rate": 4.1880000000000006e-05, + "loss": 0.0311, + "step": 4060 + }, + { + "epoch": 0.814, + "grad_norm": 0.014384951442480087, + "learning_rate": 4.186e-05, + "loss": 0.0041, + "step": 4070 + }, + { + "epoch": 0.816, + "grad_norm": 5.182435035705566, + "learning_rate": 4.184e-05, + "loss": 0.0216, + "step": 
4080 + }, + { + "epoch": 0.818, + "grad_norm": 5.221963882446289, + "learning_rate": 4.182e-05, + "loss": 0.051, + "step": 4090 + }, + { + "epoch": 0.82, + "grad_norm": 0.01110916119068861, + "learning_rate": 4.18e-05, + "loss": 0.0223, + "step": 4100 + }, + { + "epoch": 0.822, + "grad_norm": 0.062016647309064865, + "learning_rate": 4.178e-05, + "loss": 0.0102, + "step": 4110 + }, + { + "epoch": 0.824, + "grad_norm": 0.6081287860870361, + "learning_rate": 4.176000000000001e-05, + "loss": 0.0168, + "step": 4120 + }, + { + "epoch": 0.826, + "grad_norm": 0.010736220516264439, + "learning_rate": 4.1740000000000004e-05, + "loss": 0.0153, + "step": 4130 + }, + { + "epoch": 0.828, + "grad_norm": 0.03323579207062721, + "learning_rate": 4.172e-05, + "loss": 0.0224, + "step": 4140 + }, + { + "epoch": 0.83, + "grad_norm": 0.7267991304397583, + "learning_rate": 4.17e-05, + "loss": 0.0033, + "step": 4150 + }, + { + "epoch": 0.832, + "grad_norm": 0.029643530026078224, + "learning_rate": 4.168e-05, + "loss": 0.0077, + "step": 4160 + }, + { + "epoch": 0.834, + "grad_norm": 0.014343446120619774, + "learning_rate": 4.1660000000000004e-05, + "loss": 0.0025, + "step": 4170 + }, + { + "epoch": 0.836, + "grad_norm": 0.007818669080734253, + "learning_rate": 4.164e-05, + "loss": 0.0254, + "step": 4180 + }, + { + "epoch": 0.838, + "grad_norm": 0.004449554719030857, + "learning_rate": 4.1620000000000005e-05, + "loss": 0.0005, + "step": 4190 + }, + { + "epoch": 0.84, + "grad_norm": 0.005323763936758041, + "learning_rate": 4.16e-05, + "loss": 0.0761, + "step": 4200 + }, + { + "epoch": 0.842, + "grad_norm": 0.17715303599834442, + "learning_rate": 4.1580000000000005e-05, + "loss": 0.0098, + "step": 4210 + }, + { + "epoch": 0.844, + "grad_norm": 0.013948998413980007, + "learning_rate": 4.156e-05, + "loss": 0.0105, + "step": 4220 + }, + { + "epoch": 0.846, + "grad_norm": 0.029932260513305664, + "learning_rate": 4.154e-05, + "loss": 0.0022, + "step": 4230 + }, + { + "epoch": 0.848, + "grad_norm": 5.534884929656982, + "learning_rate": 4.152e-05, + "loss": 0.0178, + "step": 4240 + }, + { + "epoch": 0.85, + "grad_norm": 2.4610676765441895, + "learning_rate": 4.15e-05, + "loss": 0.0397, + "step": 4250 + }, + { + "epoch": 0.852, + "grad_norm": 0.00479121832177043, + "learning_rate": 4.148e-05, + "loss": 0.0056, + "step": 4260 + }, + { + "epoch": 0.854, + "grad_norm": 0.4627273976802826, + "learning_rate": 4.1460000000000006e-05, + "loss": 0.0665, + "step": 4270 + }, + { + "epoch": 0.856, + "grad_norm": 0.022942064329981804, + "learning_rate": 4.144e-05, + "loss": 0.0586, + "step": 4280 + }, + { + "epoch": 0.858, + "grad_norm": 0.0016935011371970177, + "learning_rate": 4.142000000000001e-05, + "loss": 0.0358, + "step": 4290 + }, + { + "epoch": 0.86, + "grad_norm": 4.48838472366333, + "learning_rate": 4.14e-05, + "loss": 0.0626, + "step": 4300 + }, + { + "epoch": 0.862, + "grad_norm": 0.0164650809019804, + "learning_rate": 4.138e-05, + "loss": 0.0551, + "step": 4310 + }, + { + "epoch": 0.864, + "grad_norm": 0.012818397022783756, + "learning_rate": 4.1360000000000004e-05, + "loss": 0.0031, + "step": 4320 + }, + { + "epoch": 0.866, + "grad_norm": 0.02281295880675316, + "learning_rate": 4.134e-05, + "loss": 0.0713, + "step": 4330 + }, + { + "epoch": 0.868, + "grad_norm": 0.37325066328048706, + "learning_rate": 4.1320000000000004e-05, + "loss": 0.0345, + "step": 4340 + }, + { + "epoch": 0.87, + "grad_norm": 0.022145600989460945, + "learning_rate": 4.13e-05, + "loss": 0.0285, + "step": 4350 + }, + { + "epoch": 0.872, + "grad_norm": 
0.006575500126928091, + "learning_rate": 4.1280000000000005e-05, + "loss": 0.0099, + "step": 4360 + }, + { + "epoch": 0.874, + "grad_norm": 0.03781846538186073, + "learning_rate": 4.126e-05, + "loss": 0.0673, + "step": 4370 + }, + { + "epoch": 0.876, + "grad_norm": 0.004571052268147469, + "learning_rate": 4.124e-05, + "loss": 0.015, + "step": 4380 + }, + { + "epoch": 0.878, + "grad_norm": 0.01689932681620121, + "learning_rate": 4.122e-05, + "loss": 0.0311, + "step": 4390 + }, + { + "epoch": 0.88, + "grad_norm": 0.06380756199359894, + "learning_rate": 4.12e-05, + "loss": 0.0041, + "step": 4400 + }, + { + "epoch": 0.882, + "grad_norm": 9.5936279296875, + "learning_rate": 4.118e-05, + "loss": 0.0814, + "step": 4410 + }, + { + "epoch": 0.884, + "grad_norm": 0.013598248362541199, + "learning_rate": 4.1160000000000006e-05, + "loss": 0.0174, + "step": 4420 + }, + { + "epoch": 0.886, + "grad_norm": 0.0035734116099774837, + "learning_rate": 4.114e-05, + "loss": 0.141, + "step": 4430 + }, + { + "epoch": 0.888, + "grad_norm": 0.025886977091431618, + "learning_rate": 4.1120000000000006e-05, + "loss": 0.0285, + "step": 4440 + }, + { + "epoch": 0.89, + "grad_norm": 4.5155487060546875, + "learning_rate": 4.11e-05, + "loss": 0.0211, + "step": 4450 + }, + { + "epoch": 0.892, + "grad_norm": 0.05487843602895737, + "learning_rate": 4.108e-05, + "loss": 0.0241, + "step": 4460 + }, + { + "epoch": 0.894, + "grad_norm": 0.013455720618367195, + "learning_rate": 4.106e-05, + "loss": 0.036, + "step": 4470 + }, + { + "epoch": 0.896, + "grad_norm": 0.012445429340004921, + "learning_rate": 4.104e-05, + "loss": 0.0015, + "step": 4480 + }, + { + "epoch": 0.898, + "grad_norm": 0.07584936916828156, + "learning_rate": 4.1020000000000004e-05, + "loss": 0.064, + "step": 4490 + }, + { + "epoch": 0.9, + "grad_norm": 0.004590052645653486, + "learning_rate": 4.1e-05, + "loss": 0.0007, + "step": 4500 + }, + { + "epoch": 0.902, + "grad_norm": 0.03764956444501877, + "learning_rate": 4.0980000000000004e-05, + "loss": 0.0008, + "step": 4510 + }, + { + "epoch": 0.904, + "grad_norm": 0.019499754533171654, + "learning_rate": 4.096e-05, + "loss": 0.0052, + "step": 4520 + }, + { + "epoch": 0.906, + "grad_norm": 0.004517500754445791, + "learning_rate": 4.094e-05, + "loss": 0.0004, + "step": 4530 + }, + { + "epoch": 0.908, + "grad_norm": 0.007550814189016819, + "learning_rate": 4.092e-05, + "loss": 0.0202, + "step": 4540 + }, + { + "epoch": 0.91, + "grad_norm": 0.00612619798630476, + "learning_rate": 4.09e-05, + "loss": 0.035, + "step": 4550 + }, + { + "epoch": 0.912, + "grad_norm": 0.004684742074459791, + "learning_rate": 4.088e-05, + "loss": 0.0004, + "step": 4560 + }, + { + "epoch": 0.914, + "grad_norm": 0.0021801223047077656, + "learning_rate": 4.0860000000000005e-05, + "loss": 0.0505, + "step": 4570 + }, + { + "epoch": 0.916, + "grad_norm": 0.06538788974285126, + "learning_rate": 4.084e-05, + "loss": 0.0037, + "step": 4580 + }, + { + "epoch": 0.918, + "grad_norm": 0.007073140237480402, + "learning_rate": 4.0820000000000006e-05, + "loss": 0.1086, + "step": 4590 + }, + { + "epoch": 0.92, + "grad_norm": 4.945827484130859, + "learning_rate": 4.08e-05, + "loss": 0.021, + "step": 4600 + }, + { + "epoch": 0.922, + "grad_norm": 0.04866447672247887, + "learning_rate": 4.078e-05, + "loss": 0.0444, + "step": 4610 + }, + { + "epoch": 0.924, + "grad_norm": 0.004719861783087254, + "learning_rate": 4.076e-05, + "loss": 0.0752, + "step": 4620 + }, + { + "epoch": 0.926, + "grad_norm": 0.8237816095352173, + "learning_rate": 4.074e-05, + "loss": 0.0286, + 
"step": 4630 + }, + { + "epoch": 0.928, + "grad_norm": 0.12901705503463745, + "learning_rate": 4.072e-05, + "loss": 0.0099, + "step": 4640 + }, + { + "epoch": 0.93, + "grad_norm": 0.03178408369421959, + "learning_rate": 4.07e-05, + "loss": 0.032, + "step": 4650 + }, + { + "epoch": 0.932, + "grad_norm": 0.04729364439845085, + "learning_rate": 4.0680000000000004e-05, + "loss": 0.0386, + "step": 4660 + }, + { + "epoch": 0.934, + "grad_norm": 0.022903740406036377, + "learning_rate": 4.066e-05, + "loss": 0.0175, + "step": 4670 + }, + { + "epoch": 0.936, + "grad_norm": 0.006164549384266138, + "learning_rate": 4.064e-05, + "loss": 0.0022, + "step": 4680 + }, + { + "epoch": 0.938, + "grad_norm": 0.004589389078319073, + "learning_rate": 4.062e-05, + "loss": 0.066, + "step": 4690 + }, + { + "epoch": 0.94, + "grad_norm": 0.012154348194599152, + "learning_rate": 4.0600000000000004e-05, + "loss": 0.0731, + "step": 4700 + }, + { + "epoch": 0.942, + "grad_norm": 6.234540939331055, + "learning_rate": 4.058e-05, + "loss": 0.0222, + "step": 4710 + }, + { + "epoch": 0.944, + "grad_norm": 0.17935433983802795, + "learning_rate": 4.0560000000000005e-05, + "loss": 0.0019, + "step": 4720 + }, + { + "epoch": 0.946, + "grad_norm": 0.21610985696315765, + "learning_rate": 4.054e-05, + "loss": 0.0588, + "step": 4730 + }, + { + "epoch": 0.948, + "grad_norm": 0.010893936268985271, + "learning_rate": 4.0520000000000005e-05, + "loss": 0.0124, + "step": 4740 + }, + { + "epoch": 0.95, + "grad_norm": 0.027378089725971222, + "learning_rate": 4.05e-05, + "loss": 0.0012, + "step": 4750 + }, + { + "epoch": 0.952, + "grad_norm": 0.19339287281036377, + "learning_rate": 4.048e-05, + "loss": 0.0696, + "step": 4760 + }, + { + "epoch": 0.954, + "grad_norm": 3.2441658973693848, + "learning_rate": 4.046e-05, + "loss": 0.041, + "step": 4770 + }, + { + "epoch": 0.956, + "grad_norm": 0.012117883190512657, + "learning_rate": 4.044e-05, + "loss": 0.0396, + "step": 4780 + }, + { + "epoch": 0.958, + "grad_norm": 0.018160009756684303, + "learning_rate": 4.042e-05, + "loss": 0.0009, + "step": 4790 + }, + { + "epoch": 0.96, + "grad_norm": 0.005018259398639202, + "learning_rate": 4.0400000000000006e-05, + "loss": 0.0006, + "step": 4800 + }, + { + "epoch": 0.962, + "grad_norm": 1.8187236785888672, + "learning_rate": 4.038e-05, + "loss": 0.0034, + "step": 4810 + }, + { + "epoch": 0.964, + "grad_norm": 0.01619546115398407, + "learning_rate": 4.0360000000000007e-05, + "loss": 0.0138, + "step": 4820 + }, + { + "epoch": 0.966, + "grad_norm": 0.0023929474409669638, + "learning_rate": 4.034e-05, + "loss": 0.0542, + "step": 4830 + }, + { + "epoch": 0.968, + "grad_norm": 0.36933594942092896, + "learning_rate": 4.032e-05, + "loss": 0.0055, + "step": 4840 + }, + { + "epoch": 0.97, + "grad_norm": 0.0013171678874641657, + "learning_rate": 4.0300000000000004e-05, + "loss": 0.0121, + "step": 4850 + }, + { + "epoch": 0.972, + "grad_norm": 0.0078110783360898495, + "learning_rate": 4.028e-05, + "loss": 0.0356, + "step": 4860 + }, + { + "epoch": 0.974, + "grad_norm": 0.0026302209589630365, + "learning_rate": 4.0260000000000004e-05, + "loss": 0.0472, + "step": 4870 + }, + { + "epoch": 0.976, + "grad_norm": 0.008590531535446644, + "learning_rate": 4.024e-05, + "loss": 0.0344, + "step": 4880 + }, + { + "epoch": 0.978, + "grad_norm": 0.0045235357247292995, + "learning_rate": 4.0220000000000005e-05, + "loss": 0.0314, + "step": 4890 + }, + { + "epoch": 0.98, + "grad_norm": 0.012373453937470913, + "learning_rate": 4.02e-05, + "loss": 0.0508, + "step": 4900 + }, + { + 
"epoch": 0.982, + "grad_norm": 4.436814785003662, + "learning_rate": 4.018e-05, + "loss": 0.0161, + "step": 4910 + }, + { + "epoch": 0.984, + "grad_norm": 0.010414298623800278, + "learning_rate": 4.016e-05, + "loss": 0.058, + "step": 4920 + }, + { + "epoch": 0.986, + "grad_norm": 0.17478038370609283, + "learning_rate": 4.014e-05, + "loss": 0.002, + "step": 4930 + }, + { + "epoch": 0.988, + "grad_norm": 0.11370497941970825, + "learning_rate": 4.012e-05, + "loss": 0.018, + "step": 4940 + }, + { + "epoch": 0.99, + "grad_norm": 0.00878177396953106, + "learning_rate": 4.0100000000000006e-05, + "loss": 0.0273, + "step": 4950 + }, + { + "epoch": 0.992, + "grad_norm": 0.004751706030219793, + "learning_rate": 4.008e-05, + "loss": 0.0365, + "step": 4960 + }, + { + "epoch": 0.994, + "grad_norm": 0.6816898584365845, + "learning_rate": 4.0060000000000006e-05, + "loss": 0.0021, + "step": 4970 + }, + { + "epoch": 0.996, + "grad_norm": 0.023028748109936714, + "learning_rate": 4.004e-05, + "loss": 0.0046, + "step": 4980 + }, + { + "epoch": 0.998, + "grad_norm": 0.008332543075084686, + "learning_rate": 4.002e-05, + "loss": 0.0289, + "step": 4990 + }, + { + "epoch": 1.0, + "grad_norm": 0.7706586718559265, + "learning_rate": 4e-05, + "loss": 0.0807, + "step": 5000 + }, + { + "epoch": 1.0, + "eval_loss": 0.016496026888489723, + "eval_runtime": 91.0716, + "eval_samples_per_second": 109.804, + "eval_steps_per_second": 13.725, + "step": 5000 + }, + { + "epoch": 1.002, + "grad_norm": 0.3708021640777588, + "learning_rate": 3.998e-05, + "loss": 0.072, + "step": 5010 + }, + { + "epoch": 1.004, + "grad_norm": 0.47253674268722534, + "learning_rate": 3.9960000000000004e-05, + "loss": 0.0386, + "step": 5020 + }, + { + "epoch": 1.006, + "grad_norm": 0.04058856889605522, + "learning_rate": 3.994e-05, + "loss": 0.022, + "step": 5030 + }, + { + "epoch": 1.008, + "grad_norm": 0.0035456023178994656, + "learning_rate": 3.9920000000000004e-05, + "loss": 0.008, + "step": 5040 + }, + { + "epoch": 1.01, + "grad_norm": 0.00531780207529664, + "learning_rate": 3.99e-05, + "loss": 0.0622, + "step": 5050 + }, + { + "epoch": 1.012, + "grad_norm": 3.454925298690796, + "learning_rate": 3.988e-05, + "loss": 0.0087, + "step": 5060 + }, + { + "epoch": 1.014, + "grad_norm": 0.017686668783426285, + "learning_rate": 3.986e-05, + "loss": 0.0291, + "step": 5070 + }, + { + "epoch": 1.016, + "grad_norm": 0.007442487869411707, + "learning_rate": 3.984e-05, + "loss": 0.0094, + "step": 5080 + }, + { + "epoch": 1.018, + "grad_norm": 0.12783612310886383, + "learning_rate": 3.982e-05, + "loss": 0.004, + "step": 5090 + }, + { + "epoch": 1.02, + "grad_norm": 0.4321322441101074, + "learning_rate": 3.9800000000000005e-05, + "loss": 0.001, + "step": 5100 + }, + { + "epoch": 1.022, + "grad_norm": 0.008714113384485245, + "learning_rate": 3.978e-05, + "loss": 0.0153, + "step": 5110 + }, + { + "epoch": 1.024, + "grad_norm": 0.8053523302078247, + "learning_rate": 3.9760000000000006e-05, + "loss": 0.0015, + "step": 5120 + }, + { + "epoch": 1.026, + "grad_norm": 0.007202483713626862, + "learning_rate": 3.974e-05, + "loss": 0.0638, + "step": 5130 + }, + { + "epoch": 1.028, + "grad_norm": 0.005011177621781826, + "learning_rate": 3.972e-05, + "loss": 0.015, + "step": 5140 + }, + { + "epoch": 1.03, + "grad_norm": 0.014276087284088135, + "learning_rate": 3.97e-05, + "loss": 0.0396, + "step": 5150 + }, + { + "epoch": 1.032, + "grad_norm": 0.02816883847117424, + "learning_rate": 3.968e-05, + "loss": 0.0009, + "step": 5160 + }, + { + "epoch": 1.034, + "grad_norm": 
0.012690484523773193, + "learning_rate": 3.966e-05, + "loss": 0.0004, + "step": 5170 + }, + { + "epoch": 1.036, + "grad_norm": 0.01476860698312521, + "learning_rate": 3.964e-05, + "loss": 0.0265, + "step": 5180 + }, + { + "epoch": 1.038, + "grad_norm": 0.04537089914083481, + "learning_rate": 3.9620000000000004e-05, + "loss": 0.0569, + "step": 5190 + }, + { + "epoch": 1.04, + "grad_norm": 0.004969718400388956, + "learning_rate": 3.960000000000001e-05, + "loss": 0.0908, + "step": 5200 + }, + { + "epoch": 1.042, + "grad_norm": 0.03211864456534386, + "learning_rate": 3.958e-05, + "loss": 0.0028, + "step": 5210 + }, + { + "epoch": 1.044, + "grad_norm": 0.058294814079999924, + "learning_rate": 3.956e-05, + "loss": 0.001, + "step": 5220 + }, + { + "epoch": 1.046, + "grad_norm": 0.015622718259692192, + "learning_rate": 3.954e-05, + "loss": 0.017, + "step": 5230 + }, + { + "epoch": 1.048, + "grad_norm": 0.014199018478393555, + "learning_rate": 3.952e-05, + "loss": 0.0462, + "step": 5240 + }, + { + "epoch": 1.05, + "grad_norm": 5.004390716552734, + "learning_rate": 3.9500000000000005e-05, + "loss": 0.0108, + "step": 5250 + }, + { + "epoch": 1.052, + "grad_norm": 0.0037176634650677443, + "learning_rate": 3.948e-05, + "loss": 0.0018, + "step": 5260 + }, + { + "epoch": 1.054, + "grad_norm": 0.19786491990089417, + "learning_rate": 3.9460000000000005e-05, + "loss": 0.0017, + "step": 5270 + }, + { + "epoch": 1.056, + "grad_norm": 0.002558846725150943, + "learning_rate": 3.944e-05, + "loss": 0.0425, + "step": 5280 + }, + { + "epoch": 1.058, + "grad_norm": 0.044299229979515076, + "learning_rate": 3.942e-05, + "loss": 0.0135, + "step": 5290 + }, + { + "epoch": 1.06, + "grad_norm": 0.027034727856516838, + "learning_rate": 3.94e-05, + "loss": 0.0005, + "step": 5300 + }, + { + "epoch": 1.062, + "grad_norm": 0.009834056720137596, + "learning_rate": 3.938e-05, + "loss": 0.019, + "step": 5310 + }, + { + "epoch": 1.064, + "grad_norm": 0.11118980497121811, + "learning_rate": 3.936e-05, + "loss": 0.0018, + "step": 5320 + }, + { + "epoch": 1.066, + "grad_norm": 0.05060764402151108, + "learning_rate": 3.9340000000000006e-05, + "loss": 0.0017, + "step": 5330 + }, + { + "epoch": 1.068, + "grad_norm": 0.011010345071554184, + "learning_rate": 3.932e-05, + "loss": 0.002, + "step": 5340 + }, + { + "epoch": 1.07, + "grad_norm": 0.005932790692895651, + "learning_rate": 3.9300000000000007e-05, + "loss": 0.0043, + "step": 5350 + }, + { + "epoch": 1.072, + "grad_norm": 0.10913924872875214, + "learning_rate": 3.9280000000000003e-05, + "loss": 0.0006, + "step": 5360 + }, + { + "epoch": 1.074, + "grad_norm": 0.030183983966708183, + "learning_rate": 3.926e-05, + "loss": 0.0758, + "step": 5370 + }, + { + "epoch": 1.076, + "grad_norm": 0.00351043906994164, + "learning_rate": 3.9240000000000004e-05, + "loss": 0.0004, + "step": 5380 + }, + { + "epoch": 1.078, + "grad_norm": 0.018562721088528633, + "learning_rate": 3.922e-05, + "loss": 0.0348, + "step": 5390 + }, + { + "epoch": 1.08, + "grad_norm": 0.015087517909705639, + "learning_rate": 3.9200000000000004e-05, + "loss": 0.002, + "step": 5400 + }, + { + "epoch": 1.082, + "grad_norm": 0.017430707812309265, + "learning_rate": 3.918e-05, + "loss": 0.0093, + "step": 5410 + }, + { + "epoch": 1.084, + "grad_norm": 3.7334983348846436, + "learning_rate": 3.9160000000000005e-05, + "loss": 0.0808, + "step": 5420 + }, + { + "epoch": 1.086, + "grad_norm": 4.454714298248291, + "learning_rate": 3.914e-05, + "loss": 0.109, + "step": 5430 + }, + { + "epoch": 1.088, + "grad_norm": 0.027748622000217438, + 
"learning_rate": 3.912e-05, + "loss": 0.0336, + "step": 5440 + }, + { + "epoch": 1.09, + "grad_norm": 0.01548692025244236, + "learning_rate": 3.91e-05, + "loss": 0.002, + "step": 5450 + }, + { + "epoch": 1.092, + "grad_norm": 0.008610161021351814, + "learning_rate": 3.908e-05, + "loss": 0.0358, + "step": 5460 + }, + { + "epoch": 1.094, + "grad_norm": 0.00959813967347145, + "learning_rate": 3.906e-05, + "loss": 0.0432, + "step": 5470 + }, + { + "epoch": 1.096, + "grad_norm": 0.006911910604685545, + "learning_rate": 3.9040000000000006e-05, + "loss": 0.0312, + "step": 5480 + }, + { + "epoch": 1.098, + "grad_norm": 0.19846265017986298, + "learning_rate": 3.902e-05, + "loss": 0.0019, + "step": 5490 + }, + { + "epoch": 1.1, + "grad_norm": 0.028356514871120453, + "learning_rate": 3.9000000000000006e-05, + "loss": 0.0291, + "step": 5500 + }, + { + "epoch": 1.102, + "grad_norm": 0.007134539540857077, + "learning_rate": 3.898e-05, + "loss": 0.0305, + "step": 5510 + }, + { + "epoch": 1.104, + "grad_norm": 0.004375297110527754, + "learning_rate": 3.896e-05, + "loss": 0.0089, + "step": 5520 + }, + { + "epoch": 1.106, + "grad_norm": 0.010319947265088558, + "learning_rate": 3.894e-05, + "loss": 0.0333, + "step": 5530 + }, + { + "epoch": 1.108, + "grad_norm": 0.0030403188429772854, + "learning_rate": 3.892e-05, + "loss": 0.0247, + "step": 5540 + }, + { + "epoch": 1.11, + "grad_norm": 0.024173550307750702, + "learning_rate": 3.8900000000000004e-05, + "loss": 0.0335, + "step": 5550 + }, + { + "epoch": 1.112, + "grad_norm": 0.004791913088411093, + "learning_rate": 3.888e-05, + "loss": 0.0004, + "step": 5560 + }, + { + "epoch": 1.114, + "grad_norm": 0.0013288178015500307, + "learning_rate": 3.8860000000000004e-05, + "loss": 0.0142, + "step": 5570 + }, + { + "epoch": 1.116, + "grad_norm": 0.0035326327197253704, + "learning_rate": 3.884e-05, + "loss": 0.0226, + "step": 5580 + }, + { + "epoch": 1.1179999999999999, + "grad_norm": 0.004453238099813461, + "learning_rate": 3.882e-05, + "loss": 0.0104, + "step": 5590 + }, + { + "epoch": 1.12, + "grad_norm": 0.02598814107477665, + "learning_rate": 3.88e-05, + "loss": 0.0007, + "step": 5600 + }, + { + "epoch": 1.1219999999999999, + "grad_norm": 0.005187725182622671, + "learning_rate": 3.878e-05, + "loss": 0.0316, + "step": 5610 + }, + { + "epoch": 1.124, + "grad_norm": 0.002645947737619281, + "learning_rate": 3.876e-05, + "loss": 0.0394, + "step": 5620 + }, + { + "epoch": 1.126, + "grad_norm": 0.039730992168188095, + "learning_rate": 3.8740000000000005e-05, + "loss": 0.0451, + "step": 5630 + }, + { + "epoch": 1.1280000000000001, + "grad_norm": 0.04360891878604889, + "learning_rate": 3.872e-05, + "loss": 0.0011, + "step": 5640 + }, + { + "epoch": 1.13, + "grad_norm": 4.470123291015625, + "learning_rate": 3.8700000000000006e-05, + "loss": 0.0098, + "step": 5650 + }, + { + "epoch": 1.1320000000000001, + "grad_norm": 0.056175097823143005, + "learning_rate": 3.868e-05, + "loss": 0.0343, + "step": 5660 + }, + { + "epoch": 1.134, + "grad_norm": 6.098271369934082, + "learning_rate": 3.866e-05, + "loss": 0.061, + "step": 5670 + }, + { + "epoch": 1.1360000000000001, + "grad_norm": 0.5581873655319214, + "learning_rate": 3.864e-05, + "loss": 0.0014, + "step": 5680 + }, + { + "epoch": 1.138, + "grad_norm": 2.2527987957000732, + "learning_rate": 3.862e-05, + "loss": 0.0088, + "step": 5690 + }, + { + "epoch": 1.1400000000000001, + "grad_norm": 0.008088558912277222, + "learning_rate": 3.86e-05, + "loss": 0.0767, + "step": 5700 + }, + { + "epoch": 1.142, + "grad_norm": 
0.036031920462846756, + "learning_rate": 3.858e-05, + "loss": 0.0007, + "step": 5710 + }, + { + "epoch": 1.144, + "grad_norm": 0.030742233619093895, + "learning_rate": 3.8560000000000004e-05, + "loss": 0.0008, + "step": 5720 + }, + { + "epoch": 1.146, + "grad_norm": 0.0673534944653511, + "learning_rate": 3.854000000000001e-05, + "loss": 0.0637, + "step": 5730 + }, + { + "epoch": 1.148, + "grad_norm": 0.016540292650461197, + "learning_rate": 3.8520000000000004e-05, + "loss": 0.041, + "step": 5740 + }, + { + "epoch": 1.15, + "grad_norm": 0.05118989571928978, + "learning_rate": 3.85e-05, + "loss": 0.0011, + "step": 5750 + }, + { + "epoch": 1.152, + "grad_norm": 0.0019592768512666225, + "learning_rate": 3.848e-05, + "loss": 0.0168, + "step": 5760 + }, + { + "epoch": 1.154, + "grad_norm": 0.2414097934961319, + "learning_rate": 3.846e-05, + "loss": 0.0008, + "step": 5770 + }, + { + "epoch": 1.156, + "grad_norm": 1.9626208543777466, + "learning_rate": 3.8440000000000005e-05, + "loss": 0.0078, + "step": 5780 + }, + { + "epoch": 1.158, + "grad_norm": 0.006050242111086845, + "learning_rate": 3.842e-05, + "loss": 0.034, + "step": 5790 + }, + { + "epoch": 1.16, + "grad_norm": 0.0023852037265896797, + "learning_rate": 3.8400000000000005e-05, + "loss": 0.0318, + "step": 5800 + }, + { + "epoch": 1.162, + "grad_norm": 0.014977013692259789, + "learning_rate": 3.838e-05, + "loss": 0.0339, + "step": 5810 + }, + { + "epoch": 1.164, + "grad_norm": 0.00630151154473424, + "learning_rate": 3.836e-05, + "loss": 0.017, + "step": 5820 + }, + { + "epoch": 1.166, + "grad_norm": 2.7326033115386963, + "learning_rate": 3.834e-05, + "loss": 0.0497, + "step": 5830 + }, + { + "epoch": 1.168, + "grad_norm": 0.01748991757631302, + "learning_rate": 3.832e-05, + "loss": 0.0259, + "step": 5840 + }, + { + "epoch": 1.17, + "grad_norm": 0.04222341999411583, + "learning_rate": 3.83e-05, + "loss": 0.0725, + "step": 5850 + }, + { + "epoch": 1.172, + "grad_norm": 0.013558843173086643, + "learning_rate": 3.828e-05, + "loss": 0.0557, + "step": 5860 + }, + { + "epoch": 1.174, + "grad_norm": 0.0790916159749031, + "learning_rate": 3.826e-05, + "loss": 0.0354, + "step": 5870 + }, + { + "epoch": 1.176, + "grad_norm": 0.038206301629543304, + "learning_rate": 3.8240000000000007e-05, + "loss": 0.0337, + "step": 5880 + }, + { + "epoch": 1.178, + "grad_norm": 0.033259790390729904, + "learning_rate": 3.822e-05, + "loss": 0.0588, + "step": 5890 + }, + { + "epoch": 1.18, + "grad_norm": 0.9643262028694153, + "learning_rate": 3.82e-05, + "loss": 0.014, + "step": 5900 + }, + { + "epoch": 1.182, + "grad_norm": 0.007181926164776087, + "learning_rate": 3.818e-05, + "loss": 0.0247, + "step": 5910 + }, + { + "epoch": 1.184, + "grad_norm": 0.09112616628408432, + "learning_rate": 3.816e-05, + "loss": 0.0211, + "step": 5920 + }, + { + "epoch": 1.186, + "grad_norm": 0.006056137848645449, + "learning_rate": 3.8140000000000004e-05, + "loss": 0.0186, + "step": 5930 + }, + { + "epoch": 1.188, + "grad_norm": 0.029104258865118027, + "learning_rate": 3.812e-05, + "loss": 0.0327, + "step": 5940 + }, + { + "epoch": 1.19, + "grad_norm": 0.02636542171239853, + "learning_rate": 3.8100000000000005e-05, + "loss": 0.0018, + "step": 5950 + }, + { + "epoch": 1.192, + "grad_norm": 0.058980781584978104, + "learning_rate": 3.808e-05, + "loss": 0.0152, + "step": 5960 + }, + { + "epoch": 1.194, + "grad_norm": 0.09248950332403183, + "learning_rate": 3.806e-05, + "loss": 0.0325, + "step": 5970 + }, + { + "epoch": 1.196, + "grad_norm": 0.0237438902258873, + "learning_rate": 3.804e-05, + 
"loss": 0.0018, + "step": 5980 + }, + { + "epoch": 1.198, + "grad_norm": 0.006177122704684734, + "learning_rate": 3.802e-05, + "loss": 0.0365, + "step": 5990 + }, + { + "epoch": 1.2, + "grad_norm": 0.6815275549888611, + "learning_rate": 3.8e-05, + "loss": 0.026, + "step": 6000 + }, + { + "epoch": 1.202, + "grad_norm": 0.010962256230413914, + "learning_rate": 3.7980000000000006e-05, + "loss": 0.0847, + "step": 6010 + }, + { + "epoch": 1.204, + "grad_norm": 0.024839259684085846, + "learning_rate": 3.796e-05, + "loss": 0.0283, + "step": 6020 + }, + { + "epoch": 1.206, + "grad_norm": 0.003172196913510561, + "learning_rate": 3.7940000000000006e-05, + "loss": 0.0146, + "step": 6030 + }, + { + "epoch": 1.208, + "grad_norm": 0.015222206711769104, + "learning_rate": 3.792e-05, + "loss": 0.0188, + "step": 6040 + }, + { + "epoch": 1.21, + "grad_norm": 10.11510181427002, + "learning_rate": 3.79e-05, + "loss": 0.0616, + "step": 6050 + }, + { + "epoch": 1.212, + "grad_norm": 0.20069774985313416, + "learning_rate": 3.788e-05, + "loss": 0.0099, + "step": 6060 + }, + { + "epoch": 1.214, + "grad_norm": 0.021253420040011406, + "learning_rate": 3.786e-05, + "loss": 0.0869, + "step": 6070 + }, + { + "epoch": 1.216, + "grad_norm": 0.016821395605802536, + "learning_rate": 3.7840000000000004e-05, + "loss": 0.0076, + "step": 6080 + }, + { + "epoch": 1.218, + "grad_norm": 0.032200634479522705, + "learning_rate": 3.782e-05, + "loss": 0.0549, + "step": 6090 + }, + { + "epoch": 1.22, + "grad_norm": 0.015066594816744328, + "learning_rate": 3.7800000000000004e-05, + "loss": 0.0348, + "step": 6100 + }, + { + "epoch": 1.222, + "grad_norm": 0.027172857895493507, + "learning_rate": 3.778000000000001e-05, + "loss": 0.0022, + "step": 6110 + }, + { + "epoch": 1.224, + "grad_norm": 5.351569175720215, + "learning_rate": 3.776e-05, + "loss": 0.007, + "step": 6120 + }, + { + "epoch": 1.226, + "grad_norm": 0.017085228115320206, + "learning_rate": 3.774e-05, + "loss": 0.0018, + "step": 6130 + }, + { + "epoch": 1.228, + "grad_norm": 2.938812017440796, + "learning_rate": 3.772e-05, + "loss": 0.0356, + "step": 6140 + }, + { + "epoch": 1.23, + "grad_norm": 0.009132993407547474, + "learning_rate": 3.77e-05, + "loss": 0.0085, + "step": 6150 + }, + { + "epoch": 1.232, + "grad_norm": 0.007850081659853458, + "learning_rate": 3.7680000000000005e-05, + "loss": 0.0086, + "step": 6160 + }, + { + "epoch": 1.234, + "grad_norm": 0.007628438528627157, + "learning_rate": 3.766e-05, + "loss": 0.004, + "step": 6170 + }, + { + "epoch": 1.236, + "grad_norm": 0.05944610759615898, + "learning_rate": 3.7640000000000006e-05, + "loss": 0.0347, + "step": 6180 + }, + { + "epoch": 1.238, + "grad_norm": 0.06916775554418564, + "learning_rate": 3.762e-05, + "loss": 0.0031, + "step": 6190 + }, + { + "epoch": 1.24, + "grad_norm": 0.0030985125340521336, + "learning_rate": 3.76e-05, + "loss": 0.057, + "step": 6200 + }, + { + "epoch": 1.242, + "grad_norm": 5.392228126525879, + "learning_rate": 3.758e-05, + "loss": 0.0098, + "step": 6210 + }, + { + "epoch": 1.244, + "grad_norm": 0.014599272049963474, + "learning_rate": 3.756e-05, + "loss": 0.096, + "step": 6220 + }, + { + "epoch": 1.246, + "grad_norm": 0.005305968225002289, + "learning_rate": 3.754e-05, + "loss": 0.0026, + "step": 6230 + }, + { + "epoch": 1.248, + "grad_norm": 0.016365058720111847, + "learning_rate": 3.752e-05, + "loss": 0.0005, + "step": 6240 + }, + { + "epoch": 1.25, + "grad_norm": 0.02052203379571438, + "learning_rate": 3.7500000000000003e-05, + "loss": 0.0026, + "step": 6250 + }, + { + "epoch": 
1.252, + "grad_norm": 0.027127837762236595, + "learning_rate": 3.748000000000001e-05, + "loss": 0.0031, + "step": 6260 + }, + { + "epoch": 1.254, + "grad_norm": 0.011660023592412472, + "learning_rate": 3.7460000000000004e-05, + "loss": 0.0007, + "step": 6270 + }, + { + "epoch": 1.256, + "grad_norm": 0.0043875472620129585, + "learning_rate": 3.744e-05, + "loss": 0.0381, + "step": 6280 + }, + { + "epoch": 1.258, + "grad_norm": 0.004166820086538792, + "learning_rate": 3.742e-05, + "loss": 0.0005, + "step": 6290 + }, + { + "epoch": 1.26, + "grad_norm": 0.010409657843410969, + "learning_rate": 3.74e-05, + "loss": 0.0004, + "step": 6300 + }, + { + "epoch": 1.262, + "grad_norm": 0.005970802158117294, + "learning_rate": 3.7380000000000005e-05, + "loss": 0.0003, + "step": 6310 + }, + { + "epoch": 1.264, + "grad_norm": 0.001832369016483426, + "learning_rate": 3.736e-05, + "loss": 0.0008, + "step": 6320 + }, + { + "epoch": 1.266, + "grad_norm": 4.269883632659912, + "learning_rate": 3.7340000000000005e-05, + "loss": 0.0912, + "step": 6330 + }, + { + "epoch": 1.268, + "grad_norm": 0.018679887056350708, + "learning_rate": 3.732e-05, + "loss": 0.0231, + "step": 6340 + }, + { + "epoch": 1.27, + "grad_norm": 0.0038905141409486532, + "learning_rate": 3.73e-05, + "loss": 0.0963, + "step": 6350 + }, + { + "epoch": 1.272, + "grad_norm": 0.0063622696325182915, + "learning_rate": 3.728e-05, + "loss": 0.0363, + "step": 6360 + }, + { + "epoch": 1.274, + "grad_norm": 7.225616931915283, + "learning_rate": 3.726e-05, + "loss": 0.0092, + "step": 6370 + }, + { + "epoch": 1.276, + "grad_norm": 0.19642934203147888, + "learning_rate": 3.724e-05, + "loss": 0.0198, + "step": 6380 + }, + { + "epoch": 1.278, + "grad_norm": 0.4732407033443451, + "learning_rate": 3.722e-05, + "loss": 0.0282, + "step": 6390 + }, + { + "epoch": 1.28, + "grad_norm": 0.008111849427223206, + "learning_rate": 3.72e-05, + "loss": 0.0011, + "step": 6400 + }, + { + "epoch": 1.282, + "grad_norm": 0.01679563894867897, + "learning_rate": 3.7180000000000007e-05, + "loss": 0.0026, + "step": 6410 + }, + { + "epoch": 1.284, + "grad_norm": 0.007721078582108021, + "learning_rate": 3.716e-05, + "loss": 0.0026, + "step": 6420 + }, + { + "epoch": 1.286, + "grad_norm": 0.0026185016613453627, + "learning_rate": 3.714e-05, + "loss": 0.036, + "step": 6430 + }, + { + "epoch": 1.288, + "grad_norm": 0.020146319642663002, + "learning_rate": 3.712e-05, + "loss": 0.0123, + "step": 6440 + }, + { + "epoch": 1.29, + "grad_norm": 0.13414332270622253, + "learning_rate": 3.71e-05, + "loss": 0.0607, + "step": 6450 + }, + { + "epoch": 1.292, + "grad_norm": 0.006200847215950489, + "learning_rate": 3.7080000000000004e-05, + "loss": 0.022, + "step": 6460 + }, + { + "epoch": 1.294, + "grad_norm": 5.099764823913574, + "learning_rate": 3.706e-05, + "loss": 0.0139, + "step": 6470 + }, + { + "epoch": 1.296, + "grad_norm": 0.02171763963997364, + "learning_rate": 3.7040000000000005e-05, + "loss": 0.0077, + "step": 6480 + }, + { + "epoch": 1.298, + "grad_norm": 0.008273259736597538, + "learning_rate": 3.702e-05, + "loss": 0.001, + "step": 6490 + }, + { + "epoch": 1.3, + "grad_norm": 0.004964989610016346, + "learning_rate": 3.7e-05, + "loss": 0.051, + "step": 6500 + }, + { + "epoch": 1.302, + "grad_norm": 0.008970340713858604, + "learning_rate": 3.698e-05, + "loss": 0.0004, + "step": 6510 + }, + { + "epoch": 1.304, + "grad_norm": 0.04173256456851959, + "learning_rate": 3.696e-05, + "loss": 0.0006, + "step": 6520 + }, + { + "epoch": 1.306, + "grad_norm": 0.005524017848074436, + "learning_rate": 
3.694e-05, + "loss": 0.0003, + "step": 6530 + }, + { + "epoch": 1.308, + "grad_norm": 5.520667552947998, + "learning_rate": 3.692e-05, + "loss": 0.0523, + "step": 6540 + }, + { + "epoch": 1.31, + "grad_norm": 0.0042858850210905075, + "learning_rate": 3.69e-05, + "loss": 0.0007, + "step": 6550 + }, + { + "epoch": 1.312, + "grad_norm": 7.040893077850342, + "learning_rate": 3.6880000000000006e-05, + "loss": 0.1054, + "step": 6560 + }, + { + "epoch": 1.314, + "grad_norm": 0.0050101811066269875, + "learning_rate": 3.686e-05, + "loss": 0.0344, + "step": 6570 + }, + { + "epoch": 1.316, + "grad_norm": 0.39104142785072327, + "learning_rate": 3.684e-05, + "loss": 0.0063, + "step": 6580 + }, + { + "epoch": 1.318, + "grad_norm": 0.005544619634747505, + "learning_rate": 3.682e-05, + "loss": 0.0082, + "step": 6590 + }, + { + "epoch": 1.32, + "grad_norm": 5.666801452636719, + "learning_rate": 3.68e-05, + "loss": 0.058, + "step": 6600 + }, + { + "epoch": 1.322, + "grad_norm": 0.04448606073856354, + "learning_rate": 3.6780000000000004e-05, + "loss": 0.0059, + "step": 6610 + }, + { + "epoch": 1.324, + "grad_norm": 0.007718035951256752, + "learning_rate": 3.676e-05, + "loss": 0.0012, + "step": 6620 + }, + { + "epoch": 1.326, + "grad_norm": 0.0031523483339697123, + "learning_rate": 3.6740000000000004e-05, + "loss": 0.0535, + "step": 6630 + }, + { + "epoch": 1.328, + "grad_norm": 8.072606086730957, + "learning_rate": 3.672000000000001e-05, + "loss": 0.0175, + "step": 6640 + }, + { + "epoch": 1.33, + "grad_norm": 0.01579626277089119, + "learning_rate": 3.6700000000000004e-05, + "loss": 0.0361, + "step": 6650 + }, + { + "epoch": 1.332, + "grad_norm": 0.006640156265348196, + "learning_rate": 3.668e-05, + "loss": 0.0742, + "step": 6660 + }, + { + "epoch": 1.334, + "grad_norm": 0.027232594788074493, + "learning_rate": 3.666e-05, + "loss": 0.0408, + "step": 6670 + }, + { + "epoch": 1.336, + "grad_norm": 0.01830277405679226, + "learning_rate": 3.664e-05, + "loss": 0.0106, + "step": 6680 + }, + { + "epoch": 1.338, + "grad_norm": 0.0011746941599994898, + "learning_rate": 3.6620000000000005e-05, + "loss": 0.0152, + "step": 6690 + }, + { + "epoch": 1.34, + "grad_norm": 0.10199079662561417, + "learning_rate": 3.66e-05, + "loss": 0.0375, + "step": 6700 + }, + { + "epoch": 1.342, + "grad_norm": 0.0049275667406618595, + "learning_rate": 3.6580000000000006e-05, + "loss": 0.0301, + "step": 6710 + }, + { + "epoch": 1.3439999999999999, + "grad_norm": 0.1531529277563095, + "learning_rate": 3.656e-05, + "loss": 0.0135, + "step": 6720 + }, + { + "epoch": 1.346, + "grad_norm": 3.4101831912994385, + "learning_rate": 3.654e-05, + "loss": 0.0459, + "step": 6730 + }, + { + "epoch": 1.3479999999999999, + "grad_norm": 0.02141091600060463, + "learning_rate": 3.652e-05, + "loss": 0.0061, + "step": 6740 + }, + { + "epoch": 1.35, + "grad_norm": 0.01527430210262537, + "learning_rate": 3.65e-05, + "loss": 0.09, + "step": 6750 + }, + { + "epoch": 1.3519999999999999, + "grad_norm": 0.007771384436637163, + "learning_rate": 3.648e-05, + "loss": 0.0069, + "step": 6760 + }, + { + "epoch": 1.354, + "grad_norm": 0.012513011693954468, + "learning_rate": 3.646e-05, + "loss": 0.0025, + "step": 6770 + }, + { + "epoch": 1.3559999999999999, + "grad_norm": 0.009514408186078072, + "learning_rate": 3.6440000000000003e-05, + "loss": 0.0008, + "step": 6780 + }, + { + "epoch": 1.358, + "grad_norm": 0.01860465668141842, + "learning_rate": 3.642000000000001e-05, + "loss": 0.0726, + "step": 6790 + }, + { + "epoch": 1.3599999999999999, + "grad_norm": 
0.2263597846031189, + "learning_rate": 3.6400000000000004e-05, + "loss": 0.0188, + "step": 6800 + }, + { + "epoch": 1.362, + "grad_norm": 5.2578654289245605, + "learning_rate": 3.638e-05, + "loss": 0.0486, + "step": 6810 + }, + { + "epoch": 1.3639999999999999, + "grad_norm": 0.008457668125629425, + "learning_rate": 3.636e-05, + "loss": 0.0071, + "step": 6820 + }, + { + "epoch": 1.366, + "grad_norm": 0.012101087719202042, + "learning_rate": 3.634e-05, + "loss": 0.0286, + "step": 6830 + }, + { + "epoch": 1.3679999999999999, + "grad_norm": 0.04453659430146217, + "learning_rate": 3.6320000000000005e-05, + "loss": 0.0014, + "step": 6840 + }, + { + "epoch": 1.37, + "grad_norm": 0.4392252564430237, + "learning_rate": 3.63e-05, + "loss": 0.0015, + "step": 6850 + }, + { + "epoch": 1.3719999999999999, + "grad_norm": 0.0513470284640789, + "learning_rate": 3.6280000000000005e-05, + "loss": 0.0477, + "step": 6860 + }, + { + "epoch": 1.374, + "grad_norm": 0.006927871610969305, + "learning_rate": 3.626e-05, + "loss": 0.0525, + "step": 6870 + }, + { + "epoch": 1.376, + "grad_norm": 0.04651854932308197, + "learning_rate": 3.624e-05, + "loss": 0.0282, + "step": 6880 + }, + { + "epoch": 1.3780000000000001, + "grad_norm": 7.312324523925781, + "learning_rate": 3.622e-05, + "loss": 0.0129, + "step": 6890 + }, + { + "epoch": 1.38, + "grad_norm": 0.0063580735586583614, + "learning_rate": 3.62e-05, + "loss": 0.0104, + "step": 6900 + }, + { + "epoch": 1.3820000000000001, + "grad_norm": 0.006133576389402151, + "learning_rate": 3.618e-05, + "loss": 0.0012, + "step": 6910 + }, + { + "epoch": 1.384, + "grad_norm": 0.005199335515499115, + "learning_rate": 3.616e-05, + "loss": 0.0239, + "step": 6920 + }, + { + "epoch": 1.3860000000000001, + "grad_norm": 0.10993275791406631, + "learning_rate": 3.614e-05, + "loss": 0.0036, + "step": 6930 + }, + { + "epoch": 1.388, + "grad_norm": 0.006421110592782497, + "learning_rate": 3.6120000000000007e-05, + "loss": 0.0265, + "step": 6940 + }, + { + "epoch": 1.3900000000000001, + "grad_norm": 0.008753607049584389, + "learning_rate": 3.61e-05, + "loss": 0.0847, + "step": 6950 + }, + { + "epoch": 1.392, + "grad_norm": 0.004009403754025698, + "learning_rate": 3.608e-05, + "loss": 0.001, + "step": 6960 + }, + { + "epoch": 1.3940000000000001, + "grad_norm": 0.27608707547187805, + "learning_rate": 3.606e-05, + "loss": 0.0025, + "step": 6970 + }, + { + "epoch": 1.396, + "grad_norm": 0.002582039451226592, + "learning_rate": 3.604e-05, + "loss": 0.0256, + "step": 6980 + }, + { + "epoch": 1.3980000000000001, + "grad_norm": 0.004374573472887278, + "learning_rate": 3.6020000000000004e-05, + "loss": 0.0006, + "step": 6990 + }, + { + "epoch": 1.4, + "grad_norm": 0.06569258868694305, + "learning_rate": 3.6e-05, + "loss": 0.0446, + "step": 7000 + }, + { + "epoch": 1.4020000000000001, + "grad_norm": 0.007347286678850651, + "learning_rate": 3.5980000000000004e-05, + "loss": 0.0015, + "step": 7010 + }, + { + "epoch": 1.404, + "grad_norm": 0.00714182248339057, + "learning_rate": 3.596e-05, + "loss": 0.0138, + "step": 7020 + }, + { + "epoch": 1.4060000000000001, + "grad_norm": 0.005495099350810051, + "learning_rate": 3.594e-05, + "loss": 0.0006, + "step": 7030 + }, + { + "epoch": 1.408, + "grad_norm": 0.2169579118490219, + "learning_rate": 3.592e-05, + "loss": 0.0022, + "step": 7040 + }, + { + "epoch": 1.41, + "grad_norm": 0.006320688873529434, + "learning_rate": 3.59e-05, + "loss": 0.0009, + "step": 7050 + }, + { + "epoch": 1.412, + "grad_norm": 0.004149855114519596, + "learning_rate": 3.588e-05, + "loss": 
0.0007, + "step": 7060 + }, + { + "epoch": 1.414, + "grad_norm": 0.163759246468544, + "learning_rate": 3.586e-05, + "loss": 0.0396, + "step": 7070 + }, + { + "epoch": 1.416, + "grad_norm": 3.08158802986145, + "learning_rate": 3.584e-05, + "loss": 0.0337, + "step": 7080 + }, + { + "epoch": 1.418, + "grad_norm": 0.06443344056606293, + "learning_rate": 3.5820000000000006e-05, + "loss": 0.0085, + "step": 7090 + }, + { + "epoch": 1.42, + "grad_norm": 0.13582125306129456, + "learning_rate": 3.58e-05, + "loss": 0.0372, + "step": 7100 + }, + { + "epoch": 1.422, + "grad_norm": 0.04201153665781021, + "learning_rate": 3.578e-05, + "loss": 0.0481, + "step": 7110 + }, + { + "epoch": 1.424, + "grad_norm": 0.7387461066246033, + "learning_rate": 3.5759999999999996e-05, + "loss": 0.094, + "step": 7120 + }, + { + "epoch": 1.426, + "grad_norm": 0.0052914912812411785, + "learning_rate": 3.574e-05, + "loss": 0.0677, + "step": 7130 + }, + { + "epoch": 1.428, + "grad_norm": 0.03011983446776867, + "learning_rate": 3.5720000000000004e-05, + "loss": 0.0548, + "step": 7140 + }, + { + "epoch": 1.43, + "grad_norm": 0.024330249056220055, + "learning_rate": 3.57e-05, + "loss": 0.0254, + "step": 7150 + }, + { + "epoch": 1.432, + "grad_norm": 0.02639131061732769, + "learning_rate": 3.5680000000000004e-05, + "loss": 0.0037, + "step": 7160 + }, + { + "epoch": 1.434, + "grad_norm": 0.008115893229842186, + "learning_rate": 3.566e-05, + "loss": 0.0022, + "step": 7170 + }, + { + "epoch": 1.436, + "grad_norm": 0.031278759241104126, + "learning_rate": 3.5640000000000004e-05, + "loss": 0.0404, + "step": 7180 + }, + { + "epoch": 1.438, + "grad_norm": 0.01463502086699009, + "learning_rate": 3.562e-05, + "loss": 0.009, + "step": 7190 + }, + { + "epoch": 1.44, + "grad_norm": 0.009483623318374157, + "learning_rate": 3.56e-05, + "loss": 0.0004, + "step": 7200 + }, + { + "epoch": 1.442, + "grad_norm": 0.003925948403775692, + "learning_rate": 3.558e-05, + "loss": 0.0235, + "step": 7210 + }, + { + "epoch": 1.444, + "grad_norm": 0.021284854039549828, + "learning_rate": 3.5560000000000005e-05, + "loss": 0.0628, + "step": 7220 + }, + { + "epoch": 1.446, + "grad_norm": 0.00911774393171072, + "learning_rate": 3.554e-05, + "loss": 0.064, + "step": 7230 + }, + { + "epoch": 1.448, + "grad_norm": 7.336594581604004, + "learning_rate": 3.5520000000000006e-05, + "loss": 0.0262, + "step": 7240 + }, + { + "epoch": 1.45, + "grad_norm": 0.018722884356975555, + "learning_rate": 3.55e-05, + "loss": 0.0041, + "step": 7250 + }, + { + "epoch": 1.452, + "grad_norm": 7.197745323181152, + "learning_rate": 3.548e-05, + "loss": 0.0204, + "step": 7260 + }, + { + "epoch": 1.454, + "grad_norm": 0.029174281284213066, + "learning_rate": 3.546e-05, + "loss": 0.0115, + "step": 7270 + }, + { + "epoch": 1.456, + "grad_norm": 0.022253194823861122, + "learning_rate": 3.544e-05, + "loss": 0.005, + "step": 7280 + }, + { + "epoch": 1.458, + "grad_norm": 0.004992309492081404, + "learning_rate": 3.542e-05, + "loss": 0.0009, + "step": 7290 + }, + { + "epoch": 1.46, + "grad_norm": 0.013181679882109165, + "learning_rate": 3.54e-05, + "loss": 0.0316, + "step": 7300 + }, + { + "epoch": 1.462, + "grad_norm": 3.856898069381714, + "learning_rate": 3.5380000000000003e-05, + "loss": 0.0599, + "step": 7310 + }, + { + "epoch": 1.464, + "grad_norm": 0.015816127881407738, + "learning_rate": 3.536000000000001e-05, + "loss": 0.0051, + "step": 7320 + }, + { + "epoch": 1.466, + "grad_norm": 0.012996883131563663, + "learning_rate": 3.5340000000000004e-05, + "loss": 0.0018, + "step": 7330 + }, + { + 
"epoch": 1.468, + "grad_norm": 0.08451968431472778, + "learning_rate": 3.532e-05, + "loss": 0.0504, + "step": 7340 + }, + { + "epoch": 1.47, + "grad_norm": 0.01942392811179161, + "learning_rate": 3.53e-05, + "loss": 0.0338, + "step": 7350 + }, + { + "epoch": 1.472, + "grad_norm": 0.007807042449712753, + "learning_rate": 3.528e-05, + "loss": 0.0014, + "step": 7360 + }, + { + "epoch": 1.474, + "grad_norm": 0.35328736901283264, + "learning_rate": 3.5260000000000005e-05, + "loss": 0.0331, + "step": 7370 + }, + { + "epoch": 1.476, + "grad_norm": 0.007190255913883448, + "learning_rate": 3.524e-05, + "loss": 0.0023, + "step": 7380 + }, + { + "epoch": 1.478, + "grad_norm": 0.005118261557072401, + "learning_rate": 3.5220000000000005e-05, + "loss": 0.0117, + "step": 7390 + }, + { + "epoch": 1.48, + "grad_norm": 0.01209124457091093, + "learning_rate": 3.52e-05, + "loss": 0.0293, + "step": 7400 + }, + { + "epoch": 1.482, + "grad_norm": 0.0048452820628881454, + "learning_rate": 3.518e-05, + "loss": 0.0268, + "step": 7410 + }, + { + "epoch": 1.484, + "grad_norm": 0.004197114147245884, + "learning_rate": 3.516e-05, + "loss": 0.0012, + "step": 7420 + }, + { + "epoch": 1.486, + "grad_norm": 0.02889455109834671, + "learning_rate": 3.514e-05, + "loss": 0.0059, + "step": 7430 + }, + { + "epoch": 1.488, + "grad_norm": 0.0030213482677936554, + "learning_rate": 3.512e-05, + "loss": 0.0032, + "step": 7440 + }, + { + "epoch": 1.49, + "grad_norm": 0.008681685663759708, + "learning_rate": 3.51e-05, + "loss": 0.01, + "step": 7450 + }, + { + "epoch": 1.492, + "grad_norm": 0.0030685935635119677, + "learning_rate": 3.508e-05, + "loss": 0.0174, + "step": 7460 + }, + { + "epoch": 1.494, + "grad_norm": 0.0032798685133457184, + "learning_rate": 3.5060000000000007e-05, + "loss": 0.0443, + "step": 7470 + }, + { + "epoch": 1.496, + "grad_norm": 0.009949653409421444, + "learning_rate": 3.504e-05, + "loss": 0.0352, + "step": 7480 + }, + { + "epoch": 1.498, + "grad_norm": 0.7159678339958191, + "learning_rate": 3.502e-05, + "loss": 0.0952, + "step": 7490 + }, + { + "epoch": 1.5, + "grad_norm": 0.00631822319701314, + "learning_rate": 3.5e-05, + "loss": 0.0775, + "step": 7500 + }, + { + "epoch": 1.502, + "grad_norm": 0.09870759397745132, + "learning_rate": 3.498e-05, + "loss": 0.002, + "step": 7510 + }, + { + "epoch": 1.504, + "grad_norm": 0.018792299553751945, + "learning_rate": 3.4960000000000004e-05, + "loss": 0.0036, + "step": 7520 + }, + { + "epoch": 1.506, + "grad_norm": 0.8837906718254089, + "learning_rate": 3.494e-05, + "loss": 0.0052, + "step": 7530 + }, + { + "epoch": 1.508, + "grad_norm": 0.009946630336344242, + "learning_rate": 3.4920000000000004e-05, + "loss": 0.0012, + "step": 7540 + }, + { + "epoch": 1.51, + "grad_norm": 1.3218886852264404, + "learning_rate": 3.49e-05, + "loss": 0.0327, + "step": 7550 + }, + { + "epoch": 1.512, + "grad_norm": 0.44792118668556213, + "learning_rate": 3.4880000000000005e-05, + "loss": 0.0033, + "step": 7560 + }, + { + "epoch": 1.514, + "grad_norm": 0.05720994248986244, + "learning_rate": 3.486e-05, + "loss": 0.0006, + "step": 7570 + }, + { + "epoch": 1.516, + "grad_norm": 0.04546366259455681, + "learning_rate": 3.484e-05, + "loss": 0.0004, + "step": 7580 + }, + { + "epoch": 1.518, + "grad_norm": 0.003919788636267185, + "learning_rate": 3.482e-05, + "loss": 0.0001, + "step": 7590 + }, + { + "epoch": 1.52, + "grad_norm": 0.008556053973734379, + "learning_rate": 3.48e-05, + "loss": 0.0007, + "step": 7600 + }, + { + "epoch": 1.522, + "grad_norm": 0.012111378833651543, + "learning_rate": 
3.478e-05, + "loss": 0.0052, + "step": 7610 + }, + { + "epoch": 1.524, + "grad_norm": 0.003584041493013501, + "learning_rate": 3.4760000000000006e-05, + "loss": 0.0003, + "step": 7620 + }, + { + "epoch": 1.526, + "grad_norm": 0.0034352331422269344, + "learning_rate": 3.474e-05, + "loss": 0.1027, + "step": 7630 + }, + { + "epoch": 1.528, + "grad_norm": 0.005325534846633673, + "learning_rate": 3.472e-05, + "loss": 0.0002, + "step": 7640 + }, + { + "epoch": 1.53, + "grad_norm": 2.399014949798584, + "learning_rate": 3.4699999999999996e-05, + "loss": 0.0229, + "step": 7650 + }, + { + "epoch": 1.532, + "grad_norm": 0.03801944851875305, + "learning_rate": 3.468e-05, + "loss": 0.0583, + "step": 7660 + }, + { + "epoch": 1.534, + "grad_norm": 4.686843395233154, + "learning_rate": 3.4660000000000004e-05, + "loss": 0.0621, + "step": 7670 + }, + { + "epoch": 1.536, + "grad_norm": 0.02090873382985592, + "learning_rate": 3.464e-05, + "loss": 0.0435, + "step": 7680 + }, + { + "epoch": 1.538, + "grad_norm": 0.003576928051188588, + "learning_rate": 3.4620000000000004e-05, + "loss": 0.0189, + "step": 7690 + }, + { + "epoch": 1.54, + "grad_norm": 0.00641475897282362, + "learning_rate": 3.46e-05, + "loss": 0.0065, + "step": 7700 + }, + { + "epoch": 1.542, + "grad_norm": 0.010015168227255344, + "learning_rate": 3.4580000000000004e-05, + "loss": 0.005, + "step": 7710 + }, + { + "epoch": 1.544, + "grad_norm": 0.002555366838350892, + "learning_rate": 3.456e-05, + "loss": 0.0736, + "step": 7720 + }, + { + "epoch": 1.546, + "grad_norm": 0.007367596495896578, + "learning_rate": 3.454e-05, + "loss": 0.0006, + "step": 7730 + }, + { + "epoch": 1.548, + "grad_norm": 4.383460521697998, + "learning_rate": 3.452e-05, + "loss": 0.0353, + "step": 7740 + }, + { + "epoch": 1.55, + "grad_norm": 8.912064552307129, + "learning_rate": 3.45e-05, + "loss": 0.0293, + "step": 7750 + }, + { + "epoch": 1.552, + "grad_norm": 0.026212628930807114, + "learning_rate": 3.448e-05, + "loss": 0.0319, + "step": 7760 + }, + { + "epoch": 1.554, + "grad_norm": 0.0010424124775454402, + "learning_rate": 3.4460000000000005e-05, + "loss": 0.0112, + "step": 7770 + }, + { + "epoch": 1.556, + "grad_norm": 0.23557564616203308, + "learning_rate": 3.444e-05, + "loss": 0.02, + "step": 7780 + }, + { + "epoch": 1.558, + "grad_norm": 0.016170961782336235, + "learning_rate": 3.442e-05, + "loss": 0.0408, + "step": 7790 + }, + { + "epoch": 1.56, + "grad_norm": 4.391359329223633, + "learning_rate": 3.4399999999999996e-05, + "loss": 0.0577, + "step": 7800 + }, + { + "epoch": 1.562, + "grad_norm": 0.023899197578430176, + "learning_rate": 3.438e-05, + "loss": 0.007, + "step": 7810 + }, + { + "epoch": 1.564, + "grad_norm": 1.6332088708877563, + "learning_rate": 3.436e-05, + "loss": 0.0548, + "step": 7820 + }, + { + "epoch": 1.5659999999999998, + "grad_norm": 0.06435178220272064, + "learning_rate": 3.434e-05, + "loss": 0.0006, + "step": 7830 + }, + { + "epoch": 1.568, + "grad_norm": 0.019996432587504387, + "learning_rate": 3.4320000000000003e-05, + "loss": 0.002, + "step": 7840 + }, + { + "epoch": 1.5699999999999998, + "grad_norm": 0.010787452571094036, + "learning_rate": 3.430000000000001e-05, + "loss": 0.0024, + "step": 7850 + }, + { + "epoch": 1.572, + "grad_norm": 0.012626455165445805, + "learning_rate": 3.4280000000000004e-05, + "loss": 0.0193, + "step": 7860 + }, + { + "epoch": 1.5739999999999998, + "grad_norm": 0.029891546815633774, + "learning_rate": 3.426e-05, + "loss": 0.0032, + "step": 7870 + }, + { + "epoch": 1.576, + "grad_norm": 0.006990303285419941, + 
"learning_rate": 3.424e-05, + "loss": 0.0304, + "step": 7880 + }, + { + "epoch": 1.5779999999999998, + "grad_norm": 0.005956538952887058, + "learning_rate": 3.422e-05, + "loss": 0.0004, + "step": 7890 + }, + { + "epoch": 1.58, + "grad_norm": 0.07458526641130447, + "learning_rate": 3.4200000000000005e-05, + "loss": 0.0878, + "step": 7900 + }, + { + "epoch": 1.5819999999999999, + "grad_norm": 0.010691751725971699, + "learning_rate": 3.418e-05, + "loss": 0.0007, + "step": 7910 + }, + { + "epoch": 1.584, + "grad_norm": 0.015178173780441284, + "learning_rate": 3.4160000000000005e-05, + "loss": 0.003, + "step": 7920 + }, + { + "epoch": 1.5859999999999999, + "grad_norm": 0.025410959497094154, + "learning_rate": 3.414e-05, + "loss": 0.0011, + "step": 7930 + }, + { + "epoch": 1.588, + "grad_norm": 0.014401872642338276, + "learning_rate": 3.412e-05, + "loss": 0.0046, + "step": 7940 + }, + { + "epoch": 1.5899999999999999, + "grad_norm": 0.006295850966125727, + "learning_rate": 3.41e-05, + "loss": 0.0879, + "step": 7950 + }, + { + "epoch": 1.592, + "grad_norm": 0.09975335747003555, + "learning_rate": 3.408e-05, + "loss": 0.0503, + "step": 7960 + }, + { + "epoch": 1.5939999999999999, + "grad_norm": 0.017794815823435783, + "learning_rate": 3.406e-05, + "loss": 0.0421, + "step": 7970 + }, + { + "epoch": 1.596, + "grad_norm": 0.9266834259033203, + "learning_rate": 3.404e-05, + "loss": 0.0038, + "step": 7980 + }, + { + "epoch": 1.5979999999999999, + "grad_norm": 0.013778113760054111, + "learning_rate": 3.402e-05, + "loss": 0.0005, + "step": 7990 + }, + { + "epoch": 1.6, + "grad_norm": 0.0268776323646307, + "learning_rate": 3.4000000000000007e-05, + "loss": 0.0183, + "step": 8000 + }, + { + "epoch": 1.6019999999999999, + "grad_norm": 0.006763918790966272, + "learning_rate": 3.398e-05, + "loss": 0.0108, + "step": 8010 + }, + { + "epoch": 1.604, + "grad_norm": 6.1889872550964355, + "learning_rate": 3.396e-05, + "loss": 0.0292, + "step": 8020 + }, + { + "epoch": 1.6059999999999999, + "grad_norm": 0.023212481290102005, + "learning_rate": 3.394e-05, + "loss": 0.0009, + "step": 8030 + }, + { + "epoch": 1.608, + "grad_norm": 0.33053529262542725, + "learning_rate": 3.392e-05, + "loss": 0.0011, + "step": 8040 + }, + { + "epoch": 1.6099999999999999, + "grad_norm": 0.005435945466160774, + "learning_rate": 3.3900000000000004e-05, + "loss": 0.042, + "step": 8050 + }, + { + "epoch": 1.612, + "grad_norm": 2.227583408355713, + "learning_rate": 3.388e-05, + "loss": 0.0297, + "step": 8060 + }, + { + "epoch": 1.6139999999999999, + "grad_norm": 0.06277505308389664, + "learning_rate": 3.3860000000000004e-05, + "loss": 0.0313, + "step": 8070 + }, + { + "epoch": 1.616, + "grad_norm": 0.04577331617474556, + "learning_rate": 3.384e-05, + "loss": 0.0169, + "step": 8080 + }, + { + "epoch": 1.6179999999999999, + "grad_norm": 0.0067207179963588715, + "learning_rate": 3.3820000000000005e-05, + "loss": 0.0401, + "step": 8090 + }, + { + "epoch": 1.62, + "grad_norm": 0.0028235744684934616, + "learning_rate": 3.38e-05, + "loss": 0.0007, + "step": 8100 + }, + { + "epoch": 1.6219999999999999, + "grad_norm": 0.02251609042286873, + "learning_rate": 3.378e-05, + "loss": 0.0388, + "step": 8110 + }, + { + "epoch": 1.624, + "grad_norm": 0.008003746159374714, + "learning_rate": 3.376e-05, + "loss": 0.0017, + "step": 8120 + }, + { + "epoch": 1.626, + "grad_norm": 0.02333793044090271, + "learning_rate": 3.374e-05, + "loss": 0.0006, + "step": 8130 + }, + { + "epoch": 1.6280000000000001, + "grad_norm": 0.01334973331540823, + "learning_rate": 3.372e-05, 
+ "loss": 0.0095, + "step": 8140 + }, + { + "epoch": 1.63, + "grad_norm": 0.04795200005173683, + "learning_rate": 3.3700000000000006e-05, + "loss": 0.0131, + "step": 8150 + }, + { + "epoch": 1.6320000000000001, + "grad_norm": 0.014241354539990425, + "learning_rate": 3.368e-05, + "loss": 0.0164, + "step": 8160 + }, + { + "epoch": 1.634, + "grad_norm": 0.0266971867531538, + "learning_rate": 3.366e-05, + "loss": 0.0011, + "step": 8170 + }, + { + "epoch": 1.6360000000000001, + "grad_norm": 0.0070693595334887505, + "learning_rate": 3.3639999999999996e-05, + "loss": 0.0207, + "step": 8180 + }, + { + "epoch": 1.638, + "grad_norm": 0.001665965304709971, + "learning_rate": 3.362e-05, + "loss": 0.0005, + "step": 8190 + }, + { + "epoch": 1.6400000000000001, + "grad_norm": 0.0016495983581990004, + "learning_rate": 3.3600000000000004e-05, + "loss": 0.001, + "step": 8200 + }, + { + "epoch": 1.642, + "grad_norm": 0.007819821126759052, + "learning_rate": 3.358e-05, + "loss": 0.0066, + "step": 8210 + }, + { + "epoch": 1.6440000000000001, + "grad_norm": 0.002417607232928276, + "learning_rate": 3.3560000000000004e-05, + "loss": 0.0028, + "step": 8220 + }, + { + "epoch": 1.646, + "grad_norm": 0.016434961929917336, + "learning_rate": 3.354e-05, + "loss": 0.0005, + "step": 8230 + }, + { + "epoch": 1.6480000000000001, + "grad_norm": 7.141128063201904, + "learning_rate": 3.3520000000000004e-05, + "loss": 0.1391, + "step": 8240 + }, + { + "epoch": 1.65, + "grad_norm": 0.012079566717147827, + "learning_rate": 3.35e-05, + "loss": 0.0823, + "step": 8250 + }, + { + "epoch": 1.6520000000000001, + "grad_norm": 0.051017794758081436, + "learning_rate": 3.348e-05, + "loss": 0.0054, + "step": 8260 + }, + { + "epoch": 1.654, + "grad_norm": 0.005140508990734816, + "learning_rate": 3.346e-05, + "loss": 0.0271, + "step": 8270 + }, + { + "epoch": 1.6560000000000001, + "grad_norm": 0.017056677490472794, + "learning_rate": 3.344e-05, + "loss": 0.0892, + "step": 8280 + }, + { + "epoch": 1.658, + "grad_norm": 0.024035515263676643, + "learning_rate": 3.342e-05, + "loss": 0.0075, + "step": 8290 + }, + { + "epoch": 1.6600000000000001, + "grad_norm": 4.052472114562988, + "learning_rate": 3.3400000000000005e-05, + "loss": 0.0297, + "step": 8300 + }, + { + "epoch": 1.662, + "grad_norm": 0.006922661792486906, + "learning_rate": 3.338e-05, + "loss": 0.0558, + "step": 8310 + }, + { + "epoch": 1.6640000000000001, + "grad_norm": 0.018334083259105682, + "learning_rate": 3.336e-05, + "loss": 0.0004, + "step": 8320 + }, + { + "epoch": 1.666, + "grad_norm": 0.011962796561419964, + "learning_rate": 3.3339999999999996e-05, + "loss": 0.0421, + "step": 8330 + }, + { + "epoch": 1.6680000000000001, + "grad_norm": 0.00863931979984045, + "learning_rate": 3.332e-05, + "loss": 0.0386, + "step": 8340 + }, + { + "epoch": 1.67, + "grad_norm": 0.016877811402082443, + "learning_rate": 3.33e-05, + "loss": 0.0013, + "step": 8350 + }, + { + "epoch": 1.6720000000000002, + "grad_norm": 3.0169286727905273, + "learning_rate": 3.328e-05, + "loss": 0.0532, + "step": 8360 + }, + { + "epoch": 1.674, + "grad_norm": 0.00712529057636857, + "learning_rate": 3.3260000000000003e-05, + "loss": 0.0355, + "step": 8370 + }, + { + "epoch": 1.6760000000000002, + "grad_norm": 0.008485515601933002, + "learning_rate": 3.324e-05, + "loss": 0.0014, + "step": 8380 + }, + { + "epoch": 1.678, + "grad_norm": 0.034667760133743286, + "learning_rate": 3.3220000000000004e-05, + "loss": 0.0248, + "step": 8390 + }, + { + "epoch": 1.6800000000000002, + "grad_norm": 0.007112329825758934, + 
"learning_rate": 3.32e-05, + "loss": 0.0042, + "step": 8400 + }, + { + "epoch": 1.682, + "grad_norm": 0.0033175209537148476, + "learning_rate": 3.318e-05, + "loss": 0.0395, + "step": 8410 + }, + { + "epoch": 1.6840000000000002, + "grad_norm": 4.283847808837891, + "learning_rate": 3.316e-05, + "loss": 0.0193, + "step": 8420 + }, + { + "epoch": 1.686, + "grad_norm": 0.006708692759275436, + "learning_rate": 3.314e-05, + "loss": 0.0008, + "step": 8430 + }, + { + "epoch": 1.688, + "grad_norm": 0.6214526891708374, + "learning_rate": 3.312e-05, + "loss": 0.0018, + "step": 8440 + }, + { + "epoch": 1.69, + "grad_norm": 0.01082578394562006, + "learning_rate": 3.3100000000000005e-05, + "loss": 0.0012, + "step": 8450 + }, + { + "epoch": 1.692, + "grad_norm": 0.016304902732372284, + "learning_rate": 3.308e-05, + "loss": 0.0036, + "step": 8460 + }, + { + "epoch": 1.694, + "grad_norm": 0.12819497287273407, + "learning_rate": 3.3060000000000005e-05, + "loss": 0.0012, + "step": 8470 + }, + { + "epoch": 1.696, + "grad_norm": 0.0035316101275384426, + "learning_rate": 3.304e-05, + "loss": 0.0157, + "step": 8480 + }, + { + "epoch": 1.698, + "grad_norm": 0.0035381668712943792, + "learning_rate": 3.302e-05, + "loss": 0.0006, + "step": 8490 + }, + { + "epoch": 1.7, + "grad_norm": 0.0053281825967133045, + "learning_rate": 3.3e-05, + "loss": 0.0067, + "step": 8500 + }, + { + "epoch": 1.702, + "grad_norm": 0.001686160103417933, + "learning_rate": 3.298e-05, + "loss": 0.0011, + "step": 8510 + }, + { + "epoch": 1.704, + "grad_norm": 0.039347004145383835, + "learning_rate": 3.296e-05, + "loss": 0.0004, + "step": 8520 + }, + { + "epoch": 1.706, + "grad_norm": 0.015970556065440178, + "learning_rate": 3.2940000000000006e-05, + "loss": 0.0412, + "step": 8530 + }, + { + "epoch": 1.708, + "grad_norm": 0.2755222022533417, + "learning_rate": 3.292e-05, + "loss": 0.001, + "step": 8540 + }, + { + "epoch": 1.71, + "grad_norm": 0.03691761940717697, + "learning_rate": 3.29e-05, + "loss": 0.0005, + "step": 8550 + }, + { + "epoch": 1.712, + "grad_norm": 0.002575872465968132, + "learning_rate": 3.288e-05, + "loss": 0.0687, + "step": 8560 + }, + { + "epoch": 1.714, + "grad_norm": 0.17692694067955017, + "learning_rate": 3.286e-05, + "loss": 0.0306, + "step": 8570 + }, + { + "epoch": 1.716, + "grad_norm": 0.0052223242819309235, + "learning_rate": 3.2840000000000004e-05, + "loss": 0.0002, + "step": 8580 + }, + { + "epoch": 1.718, + "grad_norm": 0.014517701230943203, + "learning_rate": 3.282e-05, + "loss": 0.0261, + "step": 8590 + }, + { + "epoch": 1.72, + "grad_norm": 0.002597847953438759, + "learning_rate": 3.2800000000000004e-05, + "loss": 0.0033, + "step": 8600 + }, + { + "epoch": 1.722, + "grad_norm": 0.002658716170117259, + "learning_rate": 3.278e-05, + "loss": 0.0017, + "step": 8610 + }, + { + "epoch": 1.724, + "grad_norm": 0.13059301674365997, + "learning_rate": 3.2760000000000005e-05, + "loss": 0.0492, + "step": 8620 + }, + { + "epoch": 1.726, + "grad_norm": 0.012524668127298355, + "learning_rate": 3.274e-05, + "loss": 0.0004, + "step": 8630 + }, + { + "epoch": 1.728, + "grad_norm": 8.583258628845215, + "learning_rate": 3.272e-05, + "loss": 0.0534, + "step": 8640 + }, + { + "epoch": 1.73, + "grad_norm": 0.0053812297992408276, + "learning_rate": 3.27e-05, + "loss": 0.0078, + "step": 8650 + }, + { + "epoch": 1.732, + "grad_norm": 0.005391386337578297, + "learning_rate": 3.268e-05, + "loss": 0.0112, + "step": 8660 + }, + { + "epoch": 1.734, + "grad_norm": 0.0013112288434058428, + "learning_rate": 3.266e-05, + "loss": 0.0574, + 
"step": 8670 + }, + { + "epoch": 1.736, + "grad_norm": 0.014318425208330154, + "learning_rate": 3.2640000000000006e-05, + "loss": 0.0364, + "step": 8680 + }, + { + "epoch": 1.738, + "grad_norm": 0.03327896445989609, + "learning_rate": 3.262e-05, + "loss": 0.0168, + "step": 8690 + }, + { + "epoch": 1.74, + "grad_norm": 0.4554899036884308, + "learning_rate": 3.26e-05, + "loss": 0.009, + "step": 8700 + }, + { + "epoch": 1.742, + "grad_norm": 5.39903450012207, + "learning_rate": 3.2579999999999996e-05, + "loss": 0.009, + "step": 8710 + }, + { + "epoch": 1.744, + "grad_norm": 0.006803723517805338, + "learning_rate": 3.256e-05, + "loss": 0.0148, + "step": 8720 + }, + { + "epoch": 1.746, + "grad_norm": 0.005707309115678072, + "learning_rate": 3.2540000000000004e-05, + "loss": 0.0532, + "step": 8730 + }, + { + "epoch": 1.748, + "grad_norm": 0.005006300285458565, + "learning_rate": 3.252e-05, + "loss": 0.0272, + "step": 8740 + }, + { + "epoch": 1.75, + "grad_norm": 0.012628933414816856, + "learning_rate": 3.2500000000000004e-05, + "loss": 0.0625, + "step": 8750 + }, + { + "epoch": 1.752, + "grad_norm": 0.2593526542186737, + "learning_rate": 3.248e-05, + "loss": 0.004, + "step": 8760 + }, + { + "epoch": 1.754, + "grad_norm": 0.012107155285775661, + "learning_rate": 3.2460000000000004e-05, + "loss": 0.0116, + "step": 8770 + }, + { + "epoch": 1.756, + "grad_norm": 0.012258902192115784, + "learning_rate": 3.244e-05, + "loss": 0.0075, + "step": 8780 + }, + { + "epoch": 1.758, + "grad_norm": 0.007064484991133213, + "learning_rate": 3.242e-05, + "loss": 0.0338, + "step": 8790 + }, + { + "epoch": 1.76, + "grad_norm": 0.28371575474739075, + "learning_rate": 3.24e-05, + "loss": 0.0526, + "step": 8800 + }, + { + "epoch": 1.762, + "grad_norm": 3.631308078765869, + "learning_rate": 3.238e-05, + "loss": 0.0622, + "step": 8810 + }, + { + "epoch": 1.764, + "grad_norm": 0.022568710148334503, + "learning_rate": 3.236e-05, + "loss": 0.0618, + "step": 8820 + }, + { + "epoch": 1.766, + "grad_norm": 0.0031279297545552254, + "learning_rate": 3.2340000000000005e-05, + "loss": 0.02, + "step": 8830 + }, + { + "epoch": 1.768, + "grad_norm": 0.0031439117155969143, + "learning_rate": 3.232e-05, + "loss": 0.0007, + "step": 8840 + }, + { + "epoch": 1.77, + "grad_norm": 0.013959056697785854, + "learning_rate": 3.2300000000000006e-05, + "loss": 0.003, + "step": 8850 + }, + { + "epoch": 1.772, + "grad_norm": 0.004907474387437105, + "learning_rate": 3.2279999999999996e-05, + "loss": 0.047, + "step": 8860 + }, + { + "epoch": 1.774, + "grad_norm": 4.480982780456543, + "learning_rate": 3.226e-05, + "loss": 0.0606, + "step": 8870 + }, + { + "epoch": 1.776, + "grad_norm": 0.17286856472492218, + "learning_rate": 3.224e-05, + "loss": 0.0044, + "step": 8880 + }, + { + "epoch": 1.778, + "grad_norm": 0.008956176228821278, + "learning_rate": 3.222e-05, + "loss": 0.0461, + "step": 8890 + }, + { + "epoch": 1.78, + "grad_norm": 0.01405914593487978, + "learning_rate": 3.2200000000000003e-05, + "loss": 0.027, + "step": 8900 + }, + { + "epoch": 1.782, + "grad_norm": 0.02167750708758831, + "learning_rate": 3.218e-05, + "loss": 0.0037, + "step": 8910 + }, + { + "epoch": 1.784, + "grad_norm": 0.009961246512830257, + "learning_rate": 3.2160000000000004e-05, + "loss": 0.0256, + "step": 8920 + }, + { + "epoch": 1.786, + "grad_norm": 0.012369144707918167, + "learning_rate": 3.214e-05, + "loss": 0.0025, + "step": 8930 + }, + { + "epoch": 1.788, + "grad_norm": 0.9500744938850403, + "learning_rate": 3.212e-05, + "loss": 0.0311, + "step": 8940 + }, + { + 
"epoch": 1.79, + "grad_norm": 0.0016900122864171863, + "learning_rate": 3.21e-05, + "loss": 0.0007, + "step": 8950 + }, + { + "epoch": 1.792, + "grad_norm": 0.002476219553500414, + "learning_rate": 3.208e-05, + "loss": 0.0279, + "step": 8960 + }, + { + "epoch": 1.794, + "grad_norm": 0.004267267417162657, + "learning_rate": 3.206e-05, + "loss": 0.0004, + "step": 8970 + }, + { + "epoch": 1.796, + "grad_norm": 0.04748089611530304, + "learning_rate": 3.2040000000000005e-05, + "loss": 0.0236, + "step": 8980 + }, + { + "epoch": 1.798, + "grad_norm": 0.03387998789548874, + "learning_rate": 3.202e-05, + "loss": 0.0004, + "step": 8990 + }, + { + "epoch": 1.8, + "grad_norm": 0.0028094626031816006, + "learning_rate": 3.2000000000000005e-05, + "loss": 0.0123, + "step": 9000 + }, + { + "epoch": 1.802, + "grad_norm": 0.001941610942594707, + "learning_rate": 3.198e-05, + "loss": 0.0037, + "step": 9010 + }, + { + "epoch": 1.804, + "grad_norm": 5.164578914642334, + "learning_rate": 3.196e-05, + "loss": 0.0457, + "step": 9020 + }, + { + "epoch": 1.806, + "grad_norm": 0.8224331140518188, + "learning_rate": 3.194e-05, + "loss": 0.0011, + "step": 9030 + }, + { + "epoch": 1.808, + "grad_norm": 0.00412099901586771, + "learning_rate": 3.192e-05, + "loss": 0.0061, + "step": 9040 + }, + { + "epoch": 1.81, + "grad_norm": 3.7033016681671143, + "learning_rate": 3.19e-05, + "loss": 0.0528, + "step": 9050 + }, + { + "epoch": 1.812, + "grad_norm": 0.004599456675350666, + "learning_rate": 3.188e-05, + "loss": 0.0008, + "step": 9060 + }, + { + "epoch": 1.814, + "grad_norm": 0.011987713165581226, + "learning_rate": 3.186e-05, + "loss": 0.0007, + "step": 9070 + }, + { + "epoch": 1.8159999999999998, + "grad_norm": 0.006984145380556583, + "learning_rate": 3.184e-05, + "loss": 0.0008, + "step": 9080 + }, + { + "epoch": 1.818, + "grad_norm": 0.0012510290835052729, + "learning_rate": 3.182e-05, + "loss": 0.0006, + "step": 9090 + }, + { + "epoch": 1.8199999999999998, + "grad_norm": 0.008051293902099133, + "learning_rate": 3.18e-05, + "loss": 0.0002, + "step": 9100 + }, + { + "epoch": 1.822, + "grad_norm": 0.001987141091376543, + "learning_rate": 3.1780000000000004e-05, + "loss": 0.0009, + "step": 9110 + }, + { + "epoch": 1.8239999999999998, + "grad_norm": 0.003431364195421338, + "learning_rate": 3.176e-05, + "loss": 0.0038, + "step": 9120 + }, + { + "epoch": 1.826, + "grad_norm": 0.0020495145581662655, + "learning_rate": 3.1740000000000004e-05, + "loss": 0.0013, + "step": 9130 + }, + { + "epoch": 1.8279999999999998, + "grad_norm": 0.005789860151708126, + "learning_rate": 3.172e-05, + "loss": 0.0002, + "step": 9140 + }, + { + "epoch": 1.83, + "grad_norm": 0.012445029802620411, + "learning_rate": 3.1700000000000005e-05, + "loss": 0.0029, + "step": 9150 + }, + { + "epoch": 1.8319999999999999, + "grad_norm": 1.2400474548339844, + "learning_rate": 3.168e-05, + "loss": 0.0008, + "step": 9160 + }, + { + "epoch": 1.834, + "grad_norm": 0.008145655505359173, + "learning_rate": 3.166e-05, + "loss": 0.0047, + "step": 9170 + }, + { + "epoch": 1.8359999999999999, + "grad_norm": 0.001066625933162868, + "learning_rate": 3.164e-05, + "loss": 0.0006, + "step": 9180 + }, + { + "epoch": 1.838, + "grad_norm": 0.00029511668253690004, + "learning_rate": 3.162e-05, + "loss": 0.0787, + "step": 9190 + }, + { + "epoch": 1.8399999999999999, + "grad_norm": 0.003314885776489973, + "learning_rate": 3.16e-05, + "loss": 0.0002, + "step": 9200 + }, + { + "epoch": 1.842, + "grad_norm": 0.00821120385080576, + "learning_rate": 3.1580000000000006e-05, + "loss": 
0.0036, + "step": 9210 + }, + { + "epoch": 1.8439999999999999, + "grad_norm": 0.0037178942002356052, + "learning_rate": 3.156e-05, + "loss": 0.0464, + "step": 9220 + }, + { + "epoch": 1.846, + "grad_norm": 0.0036436987575143576, + "learning_rate": 3.154e-05, + "loss": 0.0121, + "step": 9230 + }, + { + "epoch": 1.8479999999999999, + "grad_norm": 0.009318316355347633, + "learning_rate": 3.1519999999999996e-05, + "loss": 0.0007, + "step": 9240 + }, + { + "epoch": 1.85, + "grad_norm": 0.001607164042070508, + "learning_rate": 3.15e-05, + "loss": 0.0448, + "step": 9250 + }, + { + "epoch": 1.8519999999999999, + "grad_norm": 0.011071178130805492, + "learning_rate": 3.1480000000000004e-05, + "loss": 0.0022, + "step": 9260 + }, + { + "epoch": 1.854, + "grad_norm": 0.034108154475688934, + "learning_rate": 3.146e-05, + "loss": 0.0045, + "step": 9270 + }, + { + "epoch": 1.8559999999999999, + "grad_norm": 0.002131303306668997, + "learning_rate": 3.1440000000000004e-05, + "loss": 0.001, + "step": 9280 + }, + { + "epoch": 1.858, + "grad_norm": 0.017153270542621613, + "learning_rate": 3.142e-05, + "loss": 0.001, + "step": 9290 + }, + { + "epoch": 1.8599999999999999, + "grad_norm": 0.036032069474458694, + "learning_rate": 3.1400000000000004e-05, + "loss": 0.0003, + "step": 9300 + }, + { + "epoch": 1.862, + "grad_norm": 0.0025088468100875616, + "learning_rate": 3.138e-05, + "loss": 0.0665, + "step": 9310 + }, + { + "epoch": 1.8639999999999999, + "grad_norm": 5.318429946899414, + "learning_rate": 3.136e-05, + "loss": 0.0054, + "step": 9320 + }, + { + "epoch": 1.866, + "grad_norm": 0.008601468987762928, + "learning_rate": 3.134e-05, + "loss": 0.0413, + "step": 9330 + }, + { + "epoch": 1.8679999999999999, + "grad_norm": 8.6300687789917, + "learning_rate": 3.132e-05, + "loss": 0.0326, + "step": 9340 + }, + { + "epoch": 1.87, + "grad_norm": 0.004777857102453709, + "learning_rate": 3.13e-05, + "loss": 0.0006, + "step": 9350 + }, + { + "epoch": 1.8719999999999999, + "grad_norm": 3.332656145095825, + "learning_rate": 3.1280000000000005e-05, + "loss": 0.0028, + "step": 9360 + }, + { + "epoch": 1.874, + "grad_norm": 0.08430604636669159, + "learning_rate": 3.126e-05, + "loss": 0.0128, + "step": 9370 + }, + { + "epoch": 1.876, + "grad_norm": 0.0012110250536352396, + "learning_rate": 3.1240000000000006e-05, + "loss": 0.0733, + "step": 9380 + }, + { + "epoch": 1.8780000000000001, + "grad_norm": 0.0015350721077993512, + "learning_rate": 3.122e-05, + "loss": 0.0224, + "step": 9390 + }, + { + "epoch": 1.88, + "grad_norm": 0.0013420511968433857, + "learning_rate": 3.12e-05, + "loss": 0.0003, + "step": 9400 + }, + { + "epoch": 1.8820000000000001, + "grad_norm": 0.008449506014585495, + "learning_rate": 3.118e-05, + "loss": 0.0013, + "step": 9410 + }, + { + "epoch": 1.884, + "grad_norm": 0.09631182253360748, + "learning_rate": 3.116e-05, + "loss": 0.013, + "step": 9420 + }, + { + "epoch": 1.8860000000000001, + "grad_norm": 0.004350234754383564, + "learning_rate": 3.1140000000000003e-05, + "loss": 0.0004, + "step": 9430 + }, + { + "epoch": 1.888, + "grad_norm": 0.0016877787420526147, + "learning_rate": 3.112e-05, + "loss": 0.0768, + "step": 9440 + }, + { + "epoch": 1.8900000000000001, + "grad_norm": 6.81647253036499, + "learning_rate": 3.1100000000000004e-05, + "loss": 0.0102, + "step": 9450 + }, + { + "epoch": 1.892, + "grad_norm": 0.028952328488230705, + "learning_rate": 3.108e-05, + "loss": 0.0286, + "step": 9460 + }, + { + "epoch": 1.8940000000000001, + "grad_norm": 0.002540194895118475, + "learning_rate": 3.106e-05, + 
"loss": 0.0284, + "step": 9470 + }, + { + "epoch": 1.896, + "grad_norm": 0.0016325548058375716, + "learning_rate": 3.104e-05, + "loss": 0.1019, + "step": 9480 + }, + { + "epoch": 1.8980000000000001, + "grad_norm": 0.08182340115308762, + "learning_rate": 3.102e-05, + "loss": 0.0005, + "step": 9490 + }, + { + "epoch": 1.9, + "grad_norm": 0.04047470539808273, + "learning_rate": 3.1e-05, + "loss": 0.0003, + "step": 9500 + }, + { + "epoch": 1.9020000000000001, + "grad_norm": 0.0023045740090310574, + "learning_rate": 3.0980000000000005e-05, + "loss": 0.0216, + "step": 9510 + }, + { + "epoch": 1.904, + "grad_norm": 0.0018172425916418433, + "learning_rate": 3.096e-05, + "loss": 0.0315, + "step": 9520 + }, + { + "epoch": 1.9060000000000001, + "grad_norm": 0.3413315415382385, + "learning_rate": 3.0940000000000005e-05, + "loss": 0.0008, + "step": 9530 + }, + { + "epoch": 1.908, + "grad_norm": 0.002091864123940468, + "learning_rate": 3.092e-05, + "loss": 0.0094, + "step": 9540 + }, + { + "epoch": 1.9100000000000001, + "grad_norm": 0.016177967190742493, + "learning_rate": 3.09e-05, + "loss": 0.0004, + "step": 9550 + }, + { + "epoch": 1.912, + "grad_norm": 3.8514859676361084, + "learning_rate": 3.088e-05, + "loss": 0.1336, + "step": 9560 + }, + { + "epoch": 1.9140000000000001, + "grad_norm": 0.24707883596420288, + "learning_rate": 3.086e-05, + "loss": 0.0268, + "step": 9570 + }, + { + "epoch": 1.916, + "grad_norm": 2.6880104541778564, + "learning_rate": 3.084e-05, + "loss": 0.0048, + "step": 9580 + }, + { + "epoch": 1.9180000000000001, + "grad_norm": 0.06657981127500534, + "learning_rate": 3.082e-05, + "loss": 0.0442, + "step": 9590 + }, + { + "epoch": 1.92, + "grad_norm": 0.39712944626808167, + "learning_rate": 3.08e-05, + "loss": 0.0031, + "step": 9600 + }, + { + "epoch": 1.9220000000000002, + "grad_norm": 0.006004078313708305, + "learning_rate": 3.078e-05, + "loss": 0.0005, + "step": 9610 + }, + { + "epoch": 1.924, + "grad_norm": 7.00140905380249, + "learning_rate": 3.076e-05, + "loss": 0.0547, + "step": 9620 + }, + { + "epoch": 1.9260000000000002, + "grad_norm": 0.016468839719891548, + "learning_rate": 3.074e-05, + "loss": 0.0005, + "step": 9630 + }, + { + "epoch": 1.928, + "grad_norm": 0.06567908078432083, + "learning_rate": 3.072e-05, + "loss": 0.0009, + "step": 9640 + }, + { + "epoch": 1.9300000000000002, + "grad_norm": 11.211073875427246, + "learning_rate": 3.07e-05, + "loss": 0.0558, + "step": 9650 + }, + { + "epoch": 1.932, + "grad_norm": 0.008082142099738121, + "learning_rate": 3.0680000000000004e-05, + "loss": 0.0013, + "step": 9660 + }, + { + "epoch": 1.9340000000000002, + "grad_norm": 10.757091522216797, + "learning_rate": 3.066e-05, + "loss": 0.0194, + "step": 9670 + }, + { + "epoch": 1.936, + "grad_norm": 0.05785878747701645, + "learning_rate": 3.0640000000000005e-05, + "loss": 0.0006, + "step": 9680 + }, + { + "epoch": 1.938, + "grad_norm": 0.002469051396474242, + "learning_rate": 3.062e-05, + "loss": 0.0296, + "step": 9690 + }, + { + "epoch": 1.94, + "grad_norm": 0.006106134038418531, + "learning_rate": 3.06e-05, + "loss": 0.0029, + "step": 9700 + }, + { + "epoch": 1.942, + "grad_norm": 0.0031414697878062725, + "learning_rate": 3.058e-05, + "loss": 0.046, + "step": 9710 + }, + { + "epoch": 1.944, + "grad_norm": 0.011137120425701141, + "learning_rate": 3.056e-05, + "loss": 0.0308, + "step": 9720 + }, + { + "epoch": 1.946, + "grad_norm": 0.03261066600680351, + "learning_rate": 3.054e-05, + "loss": 0.0022, + "step": 9730 + }, + { + "epoch": 1.948, + "grad_norm": 0.05034400150179863, + 
"learning_rate": 3.0520000000000006e-05, + "loss": 0.0451, + "step": 9740 + }, + { + "epoch": 1.95, + "grad_norm": 2.934372663497925, + "learning_rate": 3.05e-05, + "loss": 0.0029, + "step": 9750 + }, + { + "epoch": 1.952, + "grad_norm": 0.0024814677890390158, + "learning_rate": 3.0480000000000003e-05, + "loss": 0.008, + "step": 9760 + }, + { + "epoch": 1.954, + "grad_norm": 0.012240572832524776, + "learning_rate": 3.046e-05, + "loss": 0.0142, + "step": 9770 + }, + { + "epoch": 1.956, + "grad_norm": 0.012755164876580238, + "learning_rate": 3.0440000000000003e-05, + "loss": 0.0056, + "step": 9780 + }, + { + "epoch": 1.958, + "grad_norm": 0.20030856132507324, + "learning_rate": 3.0420000000000004e-05, + "loss": 0.0093, + "step": 9790 + }, + { + "epoch": 1.96, + "grad_norm": 5.824985980987549, + "learning_rate": 3.04e-05, + "loss": 0.0199, + "step": 9800 + }, + { + "epoch": 1.962, + "grad_norm": 4.64138126373291, + "learning_rate": 3.0380000000000004e-05, + "loss": 0.0224, + "step": 9810 + }, + { + "epoch": 1.964, + "grad_norm": 0.002365650376304984, + "learning_rate": 3.036e-05, + "loss": 0.101, + "step": 9820 + }, + { + "epoch": 1.966, + "grad_norm": 0.0037491919938474894, + "learning_rate": 3.034e-05, + "loss": 0.0226, + "step": 9830 + }, + { + "epoch": 1.968, + "grad_norm": 0.018977370113134384, + "learning_rate": 3.0320000000000004e-05, + "loss": 0.0252, + "step": 9840 + }, + { + "epoch": 1.97, + "grad_norm": 0.006231269333511591, + "learning_rate": 3.03e-05, + "loss": 0.0017, + "step": 9850 + }, + { + "epoch": 1.972, + "grad_norm": 0.0033881240524351597, + "learning_rate": 3.028e-05, + "loss": 0.001, + "step": 9860 + }, + { + "epoch": 1.974, + "grad_norm": 0.0041534435003995895, + "learning_rate": 3.0259999999999998e-05, + "loss": 0.004, + "step": 9870 + }, + { + "epoch": 1.976, + "grad_norm": 7.33493185043335, + "learning_rate": 3.0240000000000002e-05, + "loss": 0.0649, + "step": 9880 + }, + { + "epoch": 1.978, + "grad_norm": 0.004932209383696318, + "learning_rate": 3.0220000000000005e-05, + "loss": 0.0178, + "step": 9890 + }, + { + "epoch": 1.98, + "grad_norm": 0.0016821910394355655, + "learning_rate": 3.02e-05, + "loss": 0.0047, + "step": 9900 + }, + { + "epoch": 1.982, + "grad_norm": 0.004557713866233826, + "learning_rate": 3.0180000000000002e-05, + "loss": 0.0435, + "step": 9910 + }, + { + "epoch": 1.984, + "grad_norm": 5.0917863845825195, + "learning_rate": 3.016e-05, + "loss": 0.0597, + "step": 9920 + }, + { + "epoch": 1.986, + "grad_norm": 0.0021672865841537714, + "learning_rate": 3.0140000000000003e-05, + "loss": 0.0003, + "step": 9930 + }, + { + "epoch": 1.988, + "grad_norm": 0.0008831778541207314, + "learning_rate": 3.0120000000000003e-05, + "loss": 0.0009, + "step": 9940 + }, + { + "epoch": 1.99, + "grad_norm": 0.006184358615428209, + "learning_rate": 3.01e-05, + "loss": 0.0038, + "step": 9950 + }, + { + "epoch": 1.992, + "grad_norm": 0.001267423969693482, + "learning_rate": 3.0080000000000003e-05, + "loss": 0.037, + "step": 9960 + }, + { + "epoch": 1.994, + "grad_norm": 0.005642888136208057, + "learning_rate": 3.006e-05, + "loss": 0.0051, + "step": 9970 + }, + { + "epoch": 1.996, + "grad_norm": 0.0026595494709908962, + "learning_rate": 3.004e-05, + "loss": 0.0655, + "step": 9980 + }, + { + "epoch": 1.998, + "grad_norm": 0.013930058106780052, + "learning_rate": 3.0020000000000004e-05, + "loss": 0.0271, + "step": 9990 + }, + { + "epoch": 2.0, + "grad_norm": 0.004837405402213335, + "learning_rate": 3e-05, + "loss": 0.027, + "step": 10000 + }, + { + "epoch": 2.0, + "eval_loss": 
0.018645154312253, + "eval_runtime": 93.8446, + "eval_samples_per_second": 106.559, + "eval_steps_per_second": 13.32, + "step": 10000 + }, + { + "epoch": 2.002, + "grad_norm": 0.022907476872205734, + "learning_rate": 2.998e-05, + "loss": 0.0216, + "step": 10010 + }, + { + "epoch": 2.004, + "grad_norm": 0.19703127443790436, + "learning_rate": 2.9959999999999998e-05, + "loss": 0.002, + "step": 10020 + }, + { + "epoch": 2.006, + "grad_norm": 0.05761381983757019, + "learning_rate": 2.994e-05, + "loss": 0.0056, + "step": 10030 + }, + { + "epoch": 2.008, + "grad_norm": 0.027498705312609673, + "learning_rate": 2.9920000000000005e-05, + "loss": 0.0175, + "step": 10040 + }, + { + "epoch": 2.01, + "grad_norm": 3.6588926315307617, + "learning_rate": 2.9900000000000002e-05, + "loss": 0.0119, + "step": 10050 + }, + { + "epoch": 2.012, + "grad_norm": 0.002984186401590705, + "learning_rate": 2.9880000000000002e-05, + "loss": 0.0121, + "step": 10060 + }, + { + "epoch": 2.014, + "grad_norm": 0.022666804492473602, + "learning_rate": 2.986e-05, + "loss": 0.0006, + "step": 10070 + }, + { + "epoch": 2.016, + "grad_norm": 0.015680214390158653, + "learning_rate": 2.9840000000000002e-05, + "loss": 0.0249, + "step": 10080 + }, + { + "epoch": 2.018, + "grad_norm": 0.010176047682762146, + "learning_rate": 2.9820000000000002e-05, + "loss": 0.0069, + "step": 10090 + }, + { + "epoch": 2.02, + "grad_norm": 0.1402248740196228, + "learning_rate": 2.98e-05, + "loss": 0.001, + "step": 10100 + }, + { + "epoch": 2.022, + "grad_norm": 0.039656609296798706, + "learning_rate": 2.9780000000000003e-05, + "loss": 0.0007, + "step": 10110 + }, + { + "epoch": 2.024, + "grad_norm": 0.0013894840376451612, + "learning_rate": 2.976e-05, + "loss": 0.0026, + "step": 10120 + }, + { + "epoch": 2.026, + "grad_norm": 0.05445275083184242, + "learning_rate": 2.974e-05, + "loss": 0.0387, + "step": 10130 + }, + { + "epoch": 2.028, + "grad_norm": 0.0013749465579167008, + "learning_rate": 2.9720000000000003e-05, + "loss": 0.0383, + "step": 10140 + }, + { + "epoch": 2.03, + "grad_norm": 0.005243366118520498, + "learning_rate": 2.97e-05, + "loss": 0.0094, + "step": 10150 + }, + { + "epoch": 2.032, + "grad_norm": 0.006576089188456535, + "learning_rate": 2.9680000000000004e-05, + "loss": 0.0216, + "step": 10160 + }, + { + "epoch": 2.034, + "grad_norm": 0.0026048733852803707, + "learning_rate": 2.9659999999999997e-05, + "loss": 0.0732, + "step": 10170 + }, + { + "epoch": 2.036, + "grad_norm": 0.006093551870435476, + "learning_rate": 2.964e-05, + "loss": 0.0416, + "step": 10180 + }, + { + "epoch": 2.038, + "grad_norm": 0.014537391252815723, + "learning_rate": 2.9620000000000004e-05, + "loss": 0.0017, + "step": 10190 + }, + { + "epoch": 2.04, + "grad_norm": 0.010529935359954834, + "learning_rate": 2.96e-05, + "loss": 0.0136, + "step": 10200 + }, + { + "epoch": 2.042, + "grad_norm": 2.0856871604919434, + "learning_rate": 2.958e-05, + "loss": 0.0047, + "step": 10210 + }, + { + "epoch": 2.044, + "grad_norm": 0.014749602414667606, + "learning_rate": 2.9559999999999998e-05, + "loss": 0.1169, + "step": 10220 + }, + { + "epoch": 2.046, + "grad_norm": 0.0697651207447052, + "learning_rate": 2.9540000000000002e-05, + "loss": 0.0203, + "step": 10230 + }, + { + "epoch": 2.048, + "grad_norm": 0.016631267964839935, + "learning_rate": 2.9520000000000002e-05, + "loss": 0.0015, + "step": 10240 + }, + { + "epoch": 2.05, + "grad_norm": 0.005667282734066248, + "learning_rate": 2.95e-05, + "loss": 0.0023, + "step": 10250 + }, + { + "epoch": 2.052, + "grad_norm": 
0.0238018911331892, + "learning_rate": 2.9480000000000002e-05, + "loss": 0.0029, + "step": 10260 + }, + { + "epoch": 2.054, + "grad_norm": 0.007609200663864613, + "learning_rate": 2.946e-05, + "loss": 0.0003, + "step": 10270 + }, + { + "epoch": 2.056, + "grad_norm": 0.012714263051748276, + "learning_rate": 2.944e-05, + "loss": 0.0015, + "step": 10280 + }, + { + "epoch": 2.058, + "grad_norm": 0.001624607597477734, + "learning_rate": 2.9420000000000003e-05, + "loss": 0.0089, + "step": 10290 + }, + { + "epoch": 2.06, + "grad_norm": 0.057245634496212006, + "learning_rate": 2.94e-05, + "loss": 0.0062, + "step": 10300 + }, + { + "epoch": 2.062, + "grad_norm": 0.1072976291179657, + "learning_rate": 2.9380000000000003e-05, + "loss": 0.001, + "step": 10310 + }, + { + "epoch": 2.064, + "grad_norm": 0.0009422163129784167, + "learning_rate": 2.9360000000000003e-05, + "loss": 0.0004, + "step": 10320 + }, + { + "epoch": 2.066, + "grad_norm": 8.735067367553711, + "learning_rate": 2.934e-05, + "loss": 0.0731, + "step": 10330 + }, + { + "epoch": 2.068, + "grad_norm": 0.0013592693721875548, + "learning_rate": 2.9320000000000004e-05, + "loss": 0.0004, + "step": 10340 + }, + { + "epoch": 2.07, + "grad_norm": 0.03364754840731621, + "learning_rate": 2.93e-05, + "loss": 0.0191, + "step": 10350 + }, + { + "epoch": 2.072, + "grad_norm": 0.004056991543620825, + "learning_rate": 2.928e-05, + "loss": 0.001, + "step": 10360 + }, + { + "epoch": 2.074, + "grad_norm": 0.008462933823466301, + "learning_rate": 2.9260000000000004e-05, + "loss": 0.0009, + "step": 10370 + }, + { + "epoch": 2.076, + "grad_norm": 0.01646353490650654, + "learning_rate": 2.924e-05, + "loss": 0.0046, + "step": 10380 + }, + { + "epoch": 2.078, + "grad_norm": 0.010266261175274849, + "learning_rate": 2.922e-05, + "loss": 0.0382, + "step": 10390 + }, + { + "epoch": 2.08, + "grad_norm": 0.01267939805984497, + "learning_rate": 2.9199999999999998e-05, + "loss": 0.0133, + "step": 10400 + }, + { + "epoch": 2.082, + "grad_norm": 0.0029067820869386196, + "learning_rate": 2.9180000000000002e-05, + "loss": 0.0407, + "step": 10410 + }, + { + "epoch": 2.084, + "grad_norm": 0.004823862109333277, + "learning_rate": 2.9160000000000005e-05, + "loss": 0.0199, + "step": 10420 + }, + { + "epoch": 2.086, + "grad_norm": 9.204266548156738, + "learning_rate": 2.9140000000000002e-05, + "loss": 0.0357, + "step": 10430 + }, + { + "epoch": 2.088, + "grad_norm": 0.0023752956185489893, + "learning_rate": 2.9120000000000002e-05, + "loss": 0.0003, + "step": 10440 + }, + { + "epoch": 2.09, + "grad_norm": 0.1312880516052246, + "learning_rate": 2.91e-05, + "loss": 0.0004, + "step": 10450 + }, + { + "epoch": 2.092, + "grad_norm": 0.0029505027923732996, + "learning_rate": 2.9080000000000003e-05, + "loss": 0.0065, + "step": 10460 + }, + { + "epoch": 2.094, + "grad_norm": 0.009261439554393291, + "learning_rate": 2.9060000000000003e-05, + "loss": 0.0132, + "step": 10470 + }, + { + "epoch": 2.096, + "grad_norm": 5.55691385269165, + "learning_rate": 2.904e-05, + "loss": 0.0151, + "step": 10480 + }, + { + "epoch": 2.098, + "grad_norm": 0.002771316794678569, + "learning_rate": 2.9020000000000003e-05, + "loss": 0.0631, + "step": 10490 + }, + { + "epoch": 2.1, + "grad_norm": 0.006200423464179039, + "learning_rate": 2.9e-05, + "loss": 0.0005, + "step": 10500 + }, + { + "epoch": 2.102, + "grad_norm": 0.9081658720970154, + "learning_rate": 2.898e-05, + "loss": 0.0021, + "step": 10510 + }, + { + "epoch": 2.104, + "grad_norm": 0.008039538748562336, + "learning_rate": 2.8960000000000004e-05, + 
"loss": 0.0005, + "step": 10520 + }, + { + "epoch": 2.106, + "grad_norm": 0.0029554793145507574, + "learning_rate": 2.894e-05, + "loss": 0.0003, + "step": 10530 + }, + { + "epoch": 2.108, + "grad_norm": 0.046315453946590424, + "learning_rate": 2.8920000000000004e-05, + "loss": 0.001, + "step": 10540 + }, + { + "epoch": 2.11, + "grad_norm": 0.003512721508741379, + "learning_rate": 2.8899999999999998e-05, + "loss": 0.0005, + "step": 10550 + }, + { + "epoch": 2.112, + "grad_norm": 1.5078462362289429, + "learning_rate": 2.888e-05, + "loss": 0.0346, + "step": 10560 + }, + { + "epoch": 2.114, + "grad_norm": 0.017720790579915047, + "learning_rate": 2.8860000000000005e-05, + "loss": 0.0422, + "step": 10570 + }, + { + "epoch": 2.116, + "grad_norm": 0.9635635018348694, + "learning_rate": 2.8840000000000002e-05, + "loss": 0.0434, + "step": 10580 + }, + { + "epoch": 2.118, + "grad_norm": 0.0331871435046196, + "learning_rate": 2.8820000000000002e-05, + "loss": 0.0002, + "step": 10590 + }, + { + "epoch": 2.12, + "grad_norm": 0.0016624679556116462, + "learning_rate": 2.88e-05, + "loss": 0.0006, + "step": 10600 + }, + { + "epoch": 2.122, + "grad_norm": 0.0020574277732521296, + "learning_rate": 2.8780000000000002e-05, + "loss": 0.0219, + "step": 10610 + }, + { + "epoch": 2.124, + "grad_norm": 0.010397090576589108, + "learning_rate": 2.8760000000000002e-05, + "loss": 0.0763, + "step": 10620 + }, + { + "epoch": 2.126, + "grad_norm": 0.0018888169433921576, + "learning_rate": 2.874e-05, + "loss": 0.025, + "step": 10630 + }, + { + "epoch": 2.128, + "grad_norm": 0.0008277022279798985, + "learning_rate": 2.8720000000000003e-05, + "loss": 0.0002, + "step": 10640 + }, + { + "epoch": 2.13, + "grad_norm": 0.005590827204287052, + "learning_rate": 2.87e-05, + "loss": 0.0037, + "step": 10650 + }, + { + "epoch": 2.132, + "grad_norm": 2.0354769229888916, + "learning_rate": 2.868e-05, + "loss": 0.0015, + "step": 10660 + }, + { + "epoch": 2.134, + "grad_norm": 3.090385913848877, + "learning_rate": 2.8660000000000003e-05, + "loss": 0.0193, + "step": 10670 + }, + { + "epoch": 2.136, + "grad_norm": 0.0038130644243210554, + "learning_rate": 2.864e-05, + "loss": 0.0011, + "step": 10680 + }, + { + "epoch": 2.138, + "grad_norm": 0.0742473378777504, + "learning_rate": 2.8620000000000004e-05, + "loss": 0.0508, + "step": 10690 + }, + { + "epoch": 2.14, + "grad_norm": 0.008754884824156761, + "learning_rate": 2.86e-05, + "loss": 0.0269, + "step": 10700 + }, + { + "epoch": 2.142, + "grad_norm": 0.05013411119580269, + "learning_rate": 2.858e-05, + "loss": 0.0007, + "step": 10710 + }, + { + "epoch": 2.144, + "grad_norm": 0.006567743141204119, + "learning_rate": 2.8560000000000004e-05, + "loss": 0.0011, + "step": 10720 + }, + { + "epoch": 2.146, + "grad_norm": 0.0007309935172088444, + "learning_rate": 2.854e-05, + "loss": 0.0311, + "step": 10730 + }, + { + "epoch": 2.148, + "grad_norm": 0.1684633046388626, + "learning_rate": 2.852e-05, + "loss": 0.0496, + "step": 10740 + }, + { + "epoch": 2.15, + "grad_norm": 7.855876922607422, + "learning_rate": 2.8499999999999998e-05, + "loss": 0.0145, + "step": 10750 + }, + { + "epoch": 2.152, + "grad_norm": 0.02232036367058754, + "learning_rate": 2.8480000000000002e-05, + "loss": 0.0366, + "step": 10760 + }, + { + "epoch": 2.154, + "grad_norm": 0.007120494265109301, + "learning_rate": 2.8460000000000002e-05, + "loss": 0.0795, + "step": 10770 + }, + { + "epoch": 2.156, + "grad_norm": 0.06669113039970398, + "learning_rate": 2.844e-05, + "loss": 0.0342, + "step": 10780 + }, + { + "epoch": 2.158, + 
"grad_norm": 0.004165279679000378, + "learning_rate": 2.8420000000000002e-05, + "loss": 0.0007, + "step": 10790 + }, + { + "epoch": 2.16, + "grad_norm": 0.0076158307492733, + "learning_rate": 2.84e-05, + "loss": 0.0011, + "step": 10800 + }, + { + "epoch": 2.162, + "grad_norm": 0.043945301324129105, + "learning_rate": 2.8380000000000003e-05, + "loss": 0.0146, + "step": 10810 + }, + { + "epoch": 2.164, + "grad_norm": 0.02764349803328514, + "learning_rate": 2.8360000000000003e-05, + "loss": 0.0007, + "step": 10820 + }, + { + "epoch": 2.166, + "grad_norm": 0.006873734761029482, + "learning_rate": 2.834e-05, + "loss": 0.0439, + "step": 10830 + }, + { + "epoch": 2.168, + "grad_norm": 0.1033577173948288, + "learning_rate": 2.8320000000000003e-05, + "loss": 0.0008, + "step": 10840 + }, + { + "epoch": 2.17, + "grad_norm": 0.00217875256203115, + "learning_rate": 2.83e-05, + "loss": 0.0024, + "step": 10850 + }, + { + "epoch": 2.172, + "grad_norm": 0.004244733136147261, + "learning_rate": 2.828e-05, + "loss": 0.0034, + "step": 10860 + }, + { + "epoch": 2.174, + "grad_norm": 2.9411137104034424, + "learning_rate": 2.8260000000000004e-05, + "loss": 0.0159, + "step": 10870 + }, + { + "epoch": 2.176, + "grad_norm": 0.00518818199634552, + "learning_rate": 2.824e-05, + "loss": 0.1114, + "step": 10880 + }, + { + "epoch": 2.178, + "grad_norm": 0.042840536683797836, + "learning_rate": 2.822e-05, + "loss": 0.0241, + "step": 10890 + }, + { + "epoch": 2.18, + "grad_norm": 0.006402214057743549, + "learning_rate": 2.8199999999999998e-05, + "loss": 0.0014, + "step": 10900 + }, + { + "epoch": 2.182, + "grad_norm": 0.053806014358997345, + "learning_rate": 2.818e-05, + "loss": 0.0045, + "step": 10910 + }, + { + "epoch": 2.184, + "grad_norm": 0.019120024517178535, + "learning_rate": 2.816e-05, + "loss": 0.0017, + "step": 10920 + }, + { + "epoch": 2.186, + "grad_norm": 0.050662651658058167, + "learning_rate": 2.8139999999999998e-05, + "loss": 0.0121, + "step": 10930 + }, + { + "epoch": 2.188, + "grad_norm": 0.004558287560939789, + "learning_rate": 2.8120000000000002e-05, + "loss": 0.0123, + "step": 10940 + }, + { + "epoch": 2.19, + "grad_norm": 0.07387060672044754, + "learning_rate": 2.8100000000000005e-05, + "loss": 0.0156, + "step": 10950 + }, + { + "epoch": 2.192, + "grad_norm": 0.013174938969314098, + "learning_rate": 2.8080000000000002e-05, + "loss": 0.0004, + "step": 10960 + }, + { + "epoch": 2.194, + "grad_norm": 0.08706201612949371, + "learning_rate": 2.8060000000000002e-05, + "loss": 0.0035, + "step": 10970 + }, + { + "epoch": 2.196, + "grad_norm": 0.05324581637978554, + "learning_rate": 2.804e-05, + "loss": 0.0738, + "step": 10980 + }, + { + "epoch": 2.198, + "grad_norm": 2.949378252029419, + "learning_rate": 2.8020000000000003e-05, + "loss": 0.0015, + "step": 10990 + }, + { + "epoch": 2.2, + "grad_norm": 0.03104395419359207, + "learning_rate": 2.8000000000000003e-05, + "loss": 0.0143, + "step": 11000 + }, + { + "epoch": 2.202, + "grad_norm": 0.0049886321648955345, + "learning_rate": 2.798e-05, + "loss": 0.0633, + "step": 11010 + }, + { + "epoch": 2.204, + "grad_norm": 0.019628267735242844, + "learning_rate": 2.7960000000000003e-05, + "loss": 0.0004, + "step": 11020 + }, + { + "epoch": 2.206, + "grad_norm": 0.03688063099980354, + "learning_rate": 2.794e-05, + "loss": 0.0004, + "step": 11030 + }, + { + "epoch": 2.208, + "grad_norm": 0.08032286167144775, + "learning_rate": 2.792e-05, + "loss": 0.0799, + "step": 11040 + }, + { + "epoch": 2.21, + "grad_norm": 0.0025247232988476753, + "learning_rate": 
2.7900000000000004e-05, + "loss": 0.0017, + "step": 11050 + }, + { + "epoch": 2.212, + "grad_norm": 8.440220832824707, + "learning_rate": 2.788e-05, + "loss": 0.0193, + "step": 11060 + }, + { + "epoch": 2.214, + "grad_norm": 0.019714640453457832, + "learning_rate": 2.7860000000000004e-05, + "loss": 0.022, + "step": 11070 + }, + { + "epoch": 2.216, + "grad_norm": 0.0017304858192801476, + "learning_rate": 2.7839999999999998e-05, + "loss": 0.0128, + "step": 11080 + }, + { + "epoch": 2.218, + "grad_norm": 0.002389088971540332, + "learning_rate": 2.782e-05, + "loss": 0.0003, + "step": 11090 + }, + { + "epoch": 2.22, + "grad_norm": 0.002172302920371294, + "learning_rate": 2.7800000000000005e-05, + "loss": 0.001, + "step": 11100 + }, + { + "epoch": 2.222, + "grad_norm": 0.009733350947499275, + "learning_rate": 2.778e-05, + "loss": 0.0004, + "step": 11110 + }, + { + "epoch": 2.224, + "grad_norm": 0.0028315680101513863, + "learning_rate": 2.7760000000000002e-05, + "loss": 0.0003, + "step": 11120 + }, + { + "epoch": 2.226, + "grad_norm": 0.6095342040061951, + "learning_rate": 2.774e-05, + "loss": 0.0045, + "step": 11130 + }, + { + "epoch": 2.228, + "grad_norm": 0.0029441171791404486, + "learning_rate": 2.7720000000000002e-05, + "loss": 0.0003, + "step": 11140 + }, + { + "epoch": 2.23, + "grad_norm": 0.0030777750071138144, + "learning_rate": 2.7700000000000002e-05, + "loss": 0.0044, + "step": 11150 + }, + { + "epoch": 2.232, + "grad_norm": 0.0014329582918435335, + "learning_rate": 2.768e-05, + "loss": 0.0071, + "step": 11160 + }, + { + "epoch": 2.234, + "grad_norm": 0.0009634523303247988, + "learning_rate": 2.7660000000000003e-05, + "loss": 0.0248, + "step": 11170 + }, + { + "epoch": 2.2359999999999998, + "grad_norm": 0.008729507215321064, + "learning_rate": 2.764e-05, + "loss": 0.0004, + "step": 11180 + }, + { + "epoch": 2.238, + "grad_norm": 0.004630973096936941, + "learning_rate": 2.762e-05, + "loss": 0.0105, + "step": 11190 + }, + { + "epoch": 2.24, + "grad_norm": 0.003606861224398017, + "learning_rate": 2.7600000000000003e-05, + "loss": 0.0251, + "step": 11200 + }, + { + "epoch": 2.242, + "grad_norm": 0.004198799841105938, + "learning_rate": 2.758e-05, + "loss": 0.003, + "step": 11210 + }, + { + "epoch": 2.2439999999999998, + "grad_norm": 0.23881489038467407, + "learning_rate": 2.7560000000000004e-05, + "loss": 0.0153, + "step": 11220 + }, + { + "epoch": 2.246, + "grad_norm": 0.009163503535091877, + "learning_rate": 2.754e-05, + "loss": 0.001, + "step": 11230 + }, + { + "epoch": 2.248, + "grad_norm": 2.6871395111083984, + "learning_rate": 2.752e-05, + "loss": 0.0262, + "step": 11240 + }, + { + "epoch": 2.25, + "grad_norm": 0.02001805603504181, + "learning_rate": 2.7500000000000004e-05, + "loss": 0.0015, + "step": 11250 + }, + { + "epoch": 2.252, + "grad_norm": 0.014580647461116314, + "learning_rate": 2.748e-05, + "loss": 0.0033, + "step": 11260 + }, + { + "epoch": 2.254, + "grad_norm": 0.010014360770583153, + "learning_rate": 2.746e-05, + "loss": 0.0067, + "step": 11270 + }, + { + "epoch": 2.2560000000000002, + "grad_norm": 0.02044554613530636, + "learning_rate": 2.7439999999999998e-05, + "loss": 0.001, + "step": 11280 + }, + { + "epoch": 2.258, + "grad_norm": 0.0019547122064977884, + "learning_rate": 2.7420000000000002e-05, + "loss": 0.0009, + "step": 11290 + }, + { + "epoch": 2.26, + "grad_norm": 0.02893057093024254, + "learning_rate": 2.7400000000000002e-05, + "loss": 0.0019, + "step": 11300 + }, + { + "epoch": 2.262, + "grad_norm": 0.003301553428173065, + "learning_rate": 2.738e-05, + 
"loss": 0.096, + "step": 11310 + }, + { + "epoch": 2.2640000000000002, + "grad_norm": 0.002293473342433572, + "learning_rate": 2.7360000000000002e-05, + "loss": 0.001, + "step": 11320 + }, + { + "epoch": 2.266, + "grad_norm": 0.0028984423261135817, + "learning_rate": 2.734e-05, + "loss": 0.0014, + "step": 11330 + }, + { + "epoch": 2.268, + "grad_norm": 0.0026602321304380894, + "learning_rate": 2.7320000000000003e-05, + "loss": 0.0655, + "step": 11340 + }, + { + "epoch": 2.27, + "grad_norm": 9.377867698669434, + "learning_rate": 2.7300000000000003e-05, + "loss": 0.0291, + "step": 11350 + }, + { + "epoch": 2.2720000000000002, + "grad_norm": 0.9569777846336365, + "learning_rate": 2.728e-05, + "loss": 0.0017, + "step": 11360 + }, + { + "epoch": 2.274, + "grad_norm": 0.004788199905306101, + "learning_rate": 2.7260000000000003e-05, + "loss": 0.0067, + "step": 11370 + }, + { + "epoch": 2.276, + "grad_norm": 3.850484609603882, + "learning_rate": 2.724e-05, + "loss": 0.0367, + "step": 11380 + }, + { + "epoch": 2.278, + "grad_norm": 0.0053862943314015865, + "learning_rate": 2.722e-05, + "loss": 0.0367, + "step": 11390 + }, + { + "epoch": 2.2800000000000002, + "grad_norm": 0.32168224453926086, + "learning_rate": 2.7200000000000004e-05, + "loss": 0.0047, + "step": 11400 + }, + { + "epoch": 2.282, + "grad_norm": 0.007178562227636576, + "learning_rate": 2.718e-05, + "loss": 0.0009, + "step": 11410 + }, + { + "epoch": 2.284, + "grad_norm": 0.03468381240963936, + "learning_rate": 2.716e-05, + "loss": 0.0003, + "step": 11420 + }, + { + "epoch": 2.286, + "grad_norm": 0.06162317097187042, + "learning_rate": 2.7139999999999998e-05, + "loss": 0.0003, + "step": 11430 + }, + { + "epoch": 2.288, + "grad_norm": 0.0026774811558425426, + "learning_rate": 2.712e-05, + "loss": 0.0012, + "step": 11440 + }, + { + "epoch": 2.29, + "grad_norm": 0.004388803616166115, + "learning_rate": 2.7100000000000005e-05, + "loss": 0.0011, + "step": 11450 + }, + { + "epoch": 2.292, + "grad_norm": 0.007513342425227165, + "learning_rate": 2.7079999999999998e-05, + "loss": 0.0228, + "step": 11460 + }, + { + "epoch": 2.294, + "grad_norm": 0.0048149460926651955, + "learning_rate": 2.7060000000000002e-05, + "loss": 0.1007, + "step": 11470 + }, + { + "epoch": 2.296, + "grad_norm": 0.0016075849998742342, + "learning_rate": 2.704e-05, + "loss": 0.0416, + "step": 11480 + }, + { + "epoch": 2.298, + "grad_norm": 2.9490482807159424, + "learning_rate": 2.7020000000000002e-05, + "loss": 0.0033, + "step": 11490 + }, + { + "epoch": 2.3, + "grad_norm": 0.0028781858272850513, + "learning_rate": 2.7000000000000002e-05, + "loss": 0.0007, + "step": 11500 + }, + { + "epoch": 2.302, + "grad_norm": 7.440994739532471, + "learning_rate": 2.698e-05, + "loss": 0.0077, + "step": 11510 + }, + { + "epoch": 2.304, + "grad_norm": 2.7259159088134766, + "learning_rate": 2.6960000000000003e-05, + "loss": 0.0058, + "step": 11520 + }, + { + "epoch": 2.306, + "grad_norm": 0.0006665542023256421, + "learning_rate": 2.694e-05, + "loss": 0.03, + "step": 11530 + }, + { + "epoch": 2.308, + "grad_norm": 3.5304527282714844, + "learning_rate": 2.692e-05, + "loss": 0.0573, + "step": 11540 + }, + { + "epoch": 2.31, + "grad_norm": 0.026721453294157982, + "learning_rate": 2.6900000000000003e-05, + "loss": 0.0142, + "step": 11550 + }, + { + "epoch": 2.312, + "grad_norm": 0.0015324908308684826, + "learning_rate": 2.688e-05, + "loss": 0.0249, + "step": 11560 + }, + { + "epoch": 2.314, + "grad_norm": 0.01611916720867157, + "learning_rate": 2.686e-05, + "loss": 0.0638, + "step": 11570 + }, + 
{ + "epoch": 2.316, + "grad_norm": 0.010218789801001549, + "learning_rate": 2.6840000000000004e-05, + "loss": 0.0465, + "step": 11580 + }, + { + "epoch": 2.318, + "grad_norm": 0.01205415278673172, + "learning_rate": 2.682e-05, + "loss": 0.0564, + "step": 11590 + }, + { + "epoch": 2.32, + "grad_norm": 0.4571046829223633, + "learning_rate": 2.6800000000000004e-05, + "loss": 0.0273, + "step": 11600 + }, + { + "epoch": 2.322, + "grad_norm": 0.06596440821886063, + "learning_rate": 2.678e-05, + "loss": 0.0008, + "step": 11610 + }, + { + "epoch": 2.324, + "grad_norm": 0.010993361473083496, + "learning_rate": 2.676e-05, + "loss": 0.0364, + "step": 11620 + }, + { + "epoch": 2.326, + "grad_norm": 0.012889869511127472, + "learning_rate": 2.6740000000000005e-05, + "loss": 0.0009, + "step": 11630 + }, + { + "epoch": 2.328, + "grad_norm": 0.04803529009222984, + "learning_rate": 2.672e-05, + "loss": 0.0017, + "step": 11640 + }, + { + "epoch": 2.33, + "grad_norm": 0.005940242204815149, + "learning_rate": 2.6700000000000002e-05, + "loss": 0.0009, + "step": 11650 + }, + { + "epoch": 2.332, + "grad_norm": 0.05164859816431999, + "learning_rate": 2.668e-05, + "loss": 0.004, + "step": 11660 + }, + { + "epoch": 2.334, + "grad_norm": 0.004865141119807959, + "learning_rate": 2.6660000000000002e-05, + "loss": 0.0016, + "step": 11670 + }, + { + "epoch": 2.336, + "grad_norm": 0.0013962871162220836, + "learning_rate": 2.6640000000000002e-05, + "loss": 0.0012, + "step": 11680 + }, + { + "epoch": 2.338, + "grad_norm": 0.0495498850941658, + "learning_rate": 2.662e-05, + "loss": 0.0012, + "step": 11690 + }, + { + "epoch": 2.34, + "grad_norm": 0.002483365125954151, + "learning_rate": 2.6600000000000003e-05, + "loss": 0.001, + "step": 11700 + }, + { + "epoch": 2.342, + "grad_norm": 0.007892883382737637, + "learning_rate": 2.658e-05, + "loss": 0.0019, + "step": 11710 + }, + { + "epoch": 2.344, + "grad_norm": 0.6501008868217468, + "learning_rate": 2.6560000000000003e-05, + "loss": 0.001, + "step": 11720 + }, + { + "epoch": 2.346, + "grad_norm": 0.004912431817501783, + "learning_rate": 2.6540000000000003e-05, + "loss": 0.0446, + "step": 11730 + }, + { + "epoch": 2.348, + "grad_norm": 0.0070532746613025665, + "learning_rate": 2.652e-05, + "loss": 0.0337, + "step": 11740 + }, + { + "epoch": 2.35, + "grad_norm": 0.0037770241033285856, + "learning_rate": 2.6500000000000004e-05, + "loss": 0.0197, + "step": 11750 + }, + { + "epoch": 2.352, + "grad_norm": 0.09849817305803299, + "learning_rate": 2.648e-05, + "loss": 0.0279, + "step": 11760 + }, + { + "epoch": 2.354, + "grad_norm": 0.0006417520926333964, + "learning_rate": 2.646e-05, + "loss": 0.001, + "step": 11770 + }, + { + "epoch": 2.356, + "grad_norm": 0.00028482265770435333, + "learning_rate": 2.6440000000000004e-05, + "loss": 0.0012, + "step": 11780 + }, + { + "epoch": 2.358, + "grad_norm": 0.03328564390540123, + "learning_rate": 2.642e-05, + "loss": 0.1101, + "step": 11790 + }, + { + "epoch": 2.36, + "grad_norm": 0.007043204270303249, + "learning_rate": 2.64e-05, + "loss": 0.0003, + "step": 11800 + }, + { + "epoch": 2.362, + "grad_norm": 0.016940118744969368, + "learning_rate": 2.6379999999999998e-05, + "loss": 0.0054, + "step": 11810 + }, + { + "epoch": 2.364, + "grad_norm": 0.008778809569776058, + "learning_rate": 2.6360000000000002e-05, + "loss": 0.0008, + "step": 11820 + }, + { + "epoch": 2.366, + "grad_norm": 0.005758294835686684, + "learning_rate": 2.6340000000000002e-05, + "loss": 0.0003, + "step": 11830 + }, + { + "epoch": 2.368, + "grad_norm": 0.026752840727567673, + 
"learning_rate": 2.632e-05, + "loss": 0.0101, + "step": 11840 + }, + { + "epoch": 2.37, + "grad_norm": 0.009118887595832348, + "learning_rate": 2.6300000000000002e-05, + "loss": 0.0005, + "step": 11850 + }, + { + "epoch": 2.372, + "grad_norm": 0.0074037835001945496, + "learning_rate": 2.628e-05, + "loss": 0.0007, + "step": 11860 + }, + { + "epoch": 2.374, + "grad_norm": 0.005524361506104469, + "learning_rate": 2.6260000000000003e-05, + "loss": 0.0003, + "step": 11870 + }, + { + "epoch": 2.376, + "grad_norm": 0.0064158011227846146, + "learning_rate": 2.6240000000000003e-05, + "loss": 0.0239, + "step": 11880 + }, + { + "epoch": 2.378, + "grad_norm": 0.009333696216344833, + "learning_rate": 2.622e-05, + "loss": 0.0009, + "step": 11890 + }, + { + "epoch": 2.38, + "grad_norm": 0.0013852640986442566, + "learning_rate": 2.6200000000000003e-05, + "loss": 0.0047, + "step": 11900 + }, + { + "epoch": 2.382, + "grad_norm": 0.002884730463847518, + "learning_rate": 2.618e-05, + "loss": 0.0008, + "step": 11910 + }, + { + "epoch": 2.384, + "grad_norm": 0.02573120780289173, + "learning_rate": 2.616e-05, + "loss": 0.0798, + "step": 11920 + }, + { + "epoch": 2.386, + "grad_norm": 0.004389532376080751, + "learning_rate": 2.6140000000000004e-05, + "loss": 0.0109, + "step": 11930 + }, + { + "epoch": 2.388, + "grad_norm": 0.017901601269841194, + "learning_rate": 2.612e-05, + "loss": 0.0004, + "step": 11940 + }, + { + "epoch": 2.39, + "grad_norm": 2.5958914756774902, + "learning_rate": 2.61e-05, + "loss": 0.048, + "step": 11950 + }, + { + "epoch": 2.392, + "grad_norm": 0.0030245562084019184, + "learning_rate": 2.6079999999999998e-05, + "loss": 0.0384, + "step": 11960 + }, + { + "epoch": 2.394, + "grad_norm": 0.001944447518326342, + "learning_rate": 2.606e-05, + "loss": 0.014, + "step": 11970 + }, + { + "epoch": 2.396, + "grad_norm": 0.013363710604608059, + "learning_rate": 2.6040000000000005e-05, + "loss": 0.0268, + "step": 11980 + }, + { + "epoch": 2.398, + "grad_norm": 0.016894085332751274, + "learning_rate": 2.602e-05, + "loss": 0.0021, + "step": 11990 + }, + { + "epoch": 2.4, + "grad_norm": 0.008409597910940647, + "learning_rate": 2.6000000000000002e-05, + "loss": 0.0023, + "step": 12000 + }, + { + "epoch": 2.402, + "grad_norm": 0.030583485960960388, + "learning_rate": 2.598e-05, + "loss": 0.0953, + "step": 12010 + }, + { + "epoch": 2.404, + "grad_norm": 0.01641802117228508, + "learning_rate": 2.5960000000000002e-05, + "loss": 0.0008, + "step": 12020 + }, + { + "epoch": 2.406, + "grad_norm": 0.0023062105756253004, + "learning_rate": 2.5940000000000002e-05, + "loss": 0.0087, + "step": 12030 + }, + { + "epoch": 2.408, + "grad_norm": 0.006509549915790558, + "learning_rate": 2.592e-05, + "loss": 0.0081, + "step": 12040 + }, + { + "epoch": 2.41, + "grad_norm": 0.0068439035676419735, + "learning_rate": 2.5900000000000003e-05, + "loss": 0.0253, + "step": 12050 + }, + { + "epoch": 2.412, + "grad_norm": 0.005552592687308788, + "learning_rate": 2.588e-05, + "loss": 0.0326, + "step": 12060 + }, + { + "epoch": 2.414, + "grad_norm": 0.011574423871934414, + "learning_rate": 2.586e-05, + "loss": 0.0318, + "step": 12070 + }, + { + "epoch": 2.416, + "grad_norm": 0.0022241093683987856, + "learning_rate": 2.5840000000000003e-05, + "loss": 0.0006, + "step": 12080 + }, + { + "epoch": 2.418, + "grad_norm": 0.00799054279923439, + "learning_rate": 2.582e-05, + "loss": 0.0007, + "step": 12090 + }, + { + "epoch": 2.42, + "grad_norm": 0.09423800557851791, + "learning_rate": 2.58e-05, + "loss": 0.0013, + "step": 12100 + }, + { + 
"epoch": 2.422, + "grad_norm": 0.005380244459956884, + "learning_rate": 2.5779999999999997e-05, + "loss": 0.0338, + "step": 12110 + }, + { + "epoch": 2.424, + "grad_norm": 0.025284456089138985, + "learning_rate": 2.576e-05, + "loss": 0.0023, + "step": 12120 + }, + { + "epoch": 2.426, + "grad_norm": 0.005192068871110678, + "learning_rate": 2.5740000000000004e-05, + "loss": 0.0063, + "step": 12130 + }, + { + "epoch": 2.428, + "grad_norm": 0.006014976650476456, + "learning_rate": 2.572e-05, + "loss": 0.0006, + "step": 12140 + }, + { + "epoch": 2.43, + "grad_norm": 0.0024557739961892366, + "learning_rate": 2.57e-05, + "loss": 0.0164, + "step": 12150 + }, + { + "epoch": 2.432, + "grad_norm": 0.03051261603832245, + "learning_rate": 2.5679999999999998e-05, + "loss": 0.0009, + "step": 12160 + }, + { + "epoch": 2.434, + "grad_norm": 0.00140279158949852, + "learning_rate": 2.566e-05, + "loss": 0.0005, + "step": 12170 + }, + { + "epoch": 2.436, + "grad_norm": 5.22982931137085, + "learning_rate": 2.5640000000000002e-05, + "loss": 0.0207, + "step": 12180 + }, + { + "epoch": 2.438, + "grad_norm": 0.003986484836786985, + "learning_rate": 2.562e-05, + "loss": 0.0008, + "step": 12190 + }, + { + "epoch": 2.44, + "grad_norm": 0.0018658132757991552, + "learning_rate": 2.5600000000000002e-05, + "loss": 0.0142, + "step": 12200 + }, + { + "epoch": 2.442, + "grad_norm": 0.905129611492157, + "learning_rate": 2.5580000000000002e-05, + "loss": 0.0026, + "step": 12210 + }, + { + "epoch": 2.444, + "grad_norm": 0.0045546796172857285, + "learning_rate": 2.556e-05, + "loss": 0.0922, + "step": 12220 + }, + { + "epoch": 2.446, + "grad_norm": 0.02099277451634407, + "learning_rate": 2.5540000000000003e-05, + "loss": 0.0006, + "step": 12230 + }, + { + "epoch": 2.448, + "grad_norm": 0.01280425488948822, + "learning_rate": 2.552e-05, + "loss": 0.0101, + "step": 12240 + }, + { + "epoch": 2.45, + "grad_norm": 5.127621650695801, + "learning_rate": 2.5500000000000003e-05, + "loss": 0.0715, + "step": 12250 + }, + { + "epoch": 2.452, + "grad_norm": 0.006172411143779755, + "learning_rate": 2.5480000000000003e-05, + "loss": 0.0008, + "step": 12260 + }, + { + "epoch": 2.454, + "grad_norm": 0.0678882971405983, + "learning_rate": 2.546e-05, + "loss": 0.0009, + "step": 12270 + }, + { + "epoch": 2.456, + "grad_norm": 0.003381960093975067, + "learning_rate": 2.5440000000000004e-05, + "loss": 0.0173, + "step": 12280 + }, + { + "epoch": 2.458, + "grad_norm": 0.005731347016990185, + "learning_rate": 2.542e-05, + "loss": 0.0114, + "step": 12290 + }, + { + "epoch": 2.46, + "grad_norm": 0.058216921985149384, + "learning_rate": 2.54e-05, + "loss": 0.0034, + "step": 12300 + }, + { + "epoch": 2.462, + "grad_norm": 0.003912481013685465, + "learning_rate": 2.5380000000000004e-05, + "loss": 0.0001, + "step": 12310 + }, + { + "epoch": 2.464, + "grad_norm": 1.2991969585418701, + "learning_rate": 2.536e-05, + "loss": 0.0021, + "step": 12320 + }, + { + "epoch": 2.466, + "grad_norm": 0.008072658441960812, + "learning_rate": 2.534e-05, + "loss": 0.0082, + "step": 12330 + }, + { + "epoch": 2.468, + "grad_norm": 0.1591532677412033, + "learning_rate": 2.5319999999999998e-05, + "loss": 0.0006, + "step": 12340 + }, + { + "epoch": 2.4699999999999998, + "grad_norm": 0.002749226987361908, + "learning_rate": 2.5300000000000002e-05, + "loss": 0.0003, + "step": 12350 + }, + { + "epoch": 2.472, + "grad_norm": 0.013632843270897865, + "learning_rate": 2.5280000000000005e-05, + "loss": 0.0491, + "step": 12360 + }, + { + "epoch": 2.474, + "grad_norm": 0.002205546712502837, + 
"learning_rate": 2.526e-05, + "loss": 0.0003, + "step": 12370 + }, + { + "epoch": 2.476, + "grad_norm": 0.0033999320585280657, + "learning_rate": 2.5240000000000002e-05, + "loss": 0.0119, + "step": 12380 + }, + { + "epoch": 2.4779999999999998, + "grad_norm": 0.004555273801088333, + "learning_rate": 2.522e-05, + "loss": 0.0013, + "step": 12390 + }, + { + "epoch": 2.48, + "grad_norm": 0.02334682084619999, + "learning_rate": 2.5200000000000003e-05, + "loss": 0.0181, + "step": 12400 + }, + { + "epoch": 2.482, + "grad_norm": 0.0025895722210407257, + "learning_rate": 2.5180000000000003e-05, + "loss": 0.0042, + "step": 12410 + }, + { + "epoch": 2.484, + "grad_norm": 0.05390416085720062, + "learning_rate": 2.516e-05, + "loss": 0.0011, + "step": 12420 + }, + { + "epoch": 2.4859999999999998, + "grad_norm": 0.0009085657657124102, + "learning_rate": 2.5140000000000003e-05, + "loss": 0.0284, + "step": 12430 + }, + { + "epoch": 2.488, + "grad_norm": 0.26577678322792053, + "learning_rate": 2.512e-05, + "loss": 0.001, + "step": 12440 + }, + { + "epoch": 2.49, + "grad_norm": 0.0048172916285693645, + "learning_rate": 2.51e-05, + "loss": 0.0004, + "step": 12450 + }, + { + "epoch": 2.492, + "grad_norm": 0.009927072562277317, + "learning_rate": 2.5080000000000004e-05, + "loss": 0.0482, + "step": 12460 + }, + { + "epoch": 2.4939999999999998, + "grad_norm": 0.0019616412464529276, + "learning_rate": 2.506e-05, + "loss": 0.0765, + "step": 12470 + }, + { + "epoch": 2.496, + "grad_norm": 0.0017744253855198622, + "learning_rate": 2.504e-05, + "loss": 0.0003, + "step": 12480 + }, + { + "epoch": 2.498, + "grad_norm": 0.012268969789147377, + "learning_rate": 2.5019999999999998e-05, + "loss": 0.0175, + "step": 12490 + }, + { + "epoch": 2.5, + "grad_norm": 5.593498229980469, + "learning_rate": 2.5e-05, + "loss": 0.0125, + "step": 12500 + }, + { + "epoch": 2.502, + "grad_norm": 0.039957594126462936, + "learning_rate": 2.498e-05, + "loss": 0.0034, + "step": 12510 + }, + { + "epoch": 2.504, + "grad_norm": 0.00404326943680644, + "learning_rate": 2.496e-05, + "loss": 0.0004, + "step": 12520 + }, + { + "epoch": 2.5060000000000002, + "grad_norm": 0.0023503380361944437, + "learning_rate": 2.4940000000000002e-05, + "loss": 0.0075, + "step": 12530 + }, + { + "epoch": 2.508, + "grad_norm": 0.005890808999538422, + "learning_rate": 2.4920000000000002e-05, + "loss": 0.0006, + "step": 12540 + }, + { + "epoch": 2.51, + "grad_norm": 0.0037465377245098352, + "learning_rate": 2.4900000000000002e-05, + "loss": 0.0015, + "step": 12550 + }, + { + "epoch": 2.512, + "grad_norm": 0.001902339281514287, + "learning_rate": 2.488e-05, + "loss": 0.0008, + "step": 12560 + }, + { + "epoch": 2.5140000000000002, + "grad_norm": 0.035102877765893936, + "learning_rate": 2.486e-05, + "loss": 0.0013, + "step": 12570 + }, + { + "epoch": 2.516, + "grad_norm": 0.00217231223359704, + "learning_rate": 2.4840000000000003e-05, + "loss": 0.0913, + "step": 12580 + }, + { + "epoch": 2.518, + "grad_norm": 0.00795662309974432, + "learning_rate": 2.4820000000000003e-05, + "loss": 0.0363, + "step": 12590 + }, + { + "epoch": 2.52, + "grad_norm": 0.007800019811838865, + "learning_rate": 2.48e-05, + "loss": 0.0185, + "step": 12600 + }, + { + "epoch": 2.5220000000000002, + "grad_norm": 0.005222728475928307, + "learning_rate": 2.478e-05, + "loss": 0.0047, + "step": 12610 + }, + { + "epoch": 2.524, + "grad_norm": 0.004378939513117075, + "learning_rate": 2.476e-05, + "loss": 0.0004, + "step": 12620 + }, + { + "epoch": 2.526, + "grad_norm": 0.015952318906784058, + "learning_rate": 
2.4740000000000004e-05, + "loss": 0.0019, + "step": 12630 + }, + { + "epoch": 2.528, + "grad_norm": 0.001999328611418605, + "learning_rate": 2.472e-05, + "loss": 0.0324, + "step": 12640 + }, + { + "epoch": 2.5300000000000002, + "grad_norm": 0.03502897545695305, + "learning_rate": 2.47e-05, + "loss": 0.039, + "step": 12650 + }, + { + "epoch": 2.532, + "grad_norm": 2.233900785446167, + "learning_rate": 2.468e-05, + "loss": 0.0422, + "step": 12660 + }, + { + "epoch": 2.534, + "grad_norm": 0.02098439261317253, + "learning_rate": 2.466e-05, + "loss": 0.0364, + "step": 12670 + }, + { + "epoch": 2.536, + "grad_norm": 0.005417154636234045, + "learning_rate": 2.464e-05, + "loss": 0.0863, + "step": 12680 + }, + { + "epoch": 2.5380000000000003, + "grad_norm": 0.0077430736273527145, + "learning_rate": 2.462e-05, + "loss": 0.008, + "step": 12690 + }, + { + "epoch": 2.54, + "grad_norm": 0.00703767966479063, + "learning_rate": 2.46e-05, + "loss": 0.0017, + "step": 12700 + }, + { + "epoch": 2.542, + "grad_norm": 6.646230220794678, + "learning_rate": 2.4580000000000002e-05, + "loss": 0.0383, + "step": 12710 + }, + { + "epoch": 2.544, + "grad_norm": 0.011931142769753933, + "learning_rate": 2.4560000000000002e-05, + "loss": 0.0024, + "step": 12720 + }, + { + "epoch": 2.5460000000000003, + "grad_norm": 0.006545823533087969, + "learning_rate": 2.4540000000000002e-05, + "loss": 0.051, + "step": 12730 + }, + { + "epoch": 2.548, + "grad_norm": 0.004485706333070993, + "learning_rate": 2.4520000000000002e-05, + "loss": 0.0014, + "step": 12740 + }, + { + "epoch": 2.55, + "grad_norm": 6.2546892166137695, + "learning_rate": 2.45e-05, + "loss": 0.0658, + "step": 12750 + }, + { + "epoch": 2.552, + "grad_norm": 0.024582767859101295, + "learning_rate": 2.448e-05, + "loss": 0.0048, + "step": 12760 + }, + { + "epoch": 2.5540000000000003, + "grad_norm": 0.009772568009793758, + "learning_rate": 2.4460000000000003e-05, + "loss": 0.0011, + "step": 12770 + }, + { + "epoch": 2.556, + "grad_norm": 0.3461325168609619, + "learning_rate": 2.4440000000000003e-05, + "loss": 0.0012, + "step": 12780 + }, + { + "epoch": 2.558, + "grad_norm": 0.004639799706637859, + "learning_rate": 2.442e-05, + "loss": 0.0037, + "step": 12790 + }, + { + "epoch": 2.56, + "grad_norm": 0.018950438126921654, + "learning_rate": 2.44e-05, + "loss": 0.0105, + "step": 12800 + }, + { + "epoch": 2.5620000000000003, + "grad_norm": 0.010657204315066338, + "learning_rate": 2.438e-05, + "loss": 0.0208, + "step": 12810 + }, + { + "epoch": 2.564, + "grad_norm": 0.0076634702272713184, + "learning_rate": 2.4360000000000004e-05, + "loss": 0.0008, + "step": 12820 + }, + { + "epoch": 2.566, + "grad_norm": 0.004654941149055958, + "learning_rate": 2.434e-05, + "loss": 0.0023, + "step": 12830 + }, + { + "epoch": 2.568, + "grad_norm": 0.010933748446404934, + "learning_rate": 2.432e-05, + "loss": 0.0039, + "step": 12840 + }, + { + "epoch": 2.57, + "grad_norm": 9.478780746459961, + "learning_rate": 2.43e-05, + "loss": 0.0268, + "step": 12850 + }, + { + "epoch": 2.572, + "grad_norm": 4.93724250793457, + "learning_rate": 2.428e-05, + "loss": 0.0525, + "step": 12860 + }, + { + "epoch": 2.574, + "grad_norm": 0.006643955130130053, + "learning_rate": 2.426e-05, + "loss": 0.0006, + "step": 12870 + }, + { + "epoch": 2.576, + "grad_norm": 0.06824769824743271, + "learning_rate": 2.4240000000000002e-05, + "loss": 0.058, + "step": 12880 + }, + { + "epoch": 2.578, + "grad_norm": 0.003721135901287198, + "learning_rate": 2.4220000000000002e-05, + "loss": 0.0022, + "step": 12890 + }, + { + 
"epoch": 2.58, + "grad_norm": 0.076459601521492, + "learning_rate": 2.4200000000000002e-05, + "loss": 0.0114, + "step": 12900 + }, + { + "epoch": 2.582, + "grad_norm": 0.0034186700358986855, + "learning_rate": 2.418e-05, + "loss": 0.0252, + "step": 12910 + }, + { + "epoch": 2.584, + "grad_norm": 0.019875500351190567, + "learning_rate": 2.4160000000000002e-05, + "loss": 0.0146, + "step": 12920 + }, + { + "epoch": 2.586, + "grad_norm": 0.004298020154237747, + "learning_rate": 2.4140000000000003e-05, + "loss": 0.0044, + "step": 12930 + }, + { + "epoch": 2.588, + "grad_norm": 0.005100682843476534, + "learning_rate": 2.412e-05, + "loss": 0.0016, + "step": 12940 + }, + { + "epoch": 2.59, + "grad_norm": 14.05281925201416, + "learning_rate": 2.41e-05, + "loss": 0.0197, + "step": 12950 + }, + { + "epoch": 2.592, + "grad_norm": 0.5248048305511475, + "learning_rate": 2.408e-05, + "loss": 0.001, + "step": 12960 + }, + { + "epoch": 2.594, + "grad_norm": 0.0029409376438707113, + "learning_rate": 2.4060000000000003e-05, + "loss": 0.0398, + "step": 12970 + }, + { + "epoch": 2.596, + "grad_norm": 0.005220264196395874, + "learning_rate": 2.404e-05, + "loss": 0.0123, + "step": 12980 + }, + { + "epoch": 2.598, + "grad_norm": 0.004500200040638447, + "learning_rate": 2.402e-05, + "loss": 0.0084, + "step": 12990 + }, + { + "epoch": 2.6, + "grad_norm": 0.016876282170414925, + "learning_rate": 2.4e-05, + "loss": 0.0004, + "step": 13000 + }, + { + "epoch": 2.602, + "grad_norm": 0.00897781178355217, + "learning_rate": 2.398e-05, + "loss": 0.0004, + "step": 13010 + }, + { + "epoch": 2.604, + "grad_norm": 0.8232995867729187, + "learning_rate": 2.396e-05, + "loss": 0.0175, + "step": 13020 + }, + { + "epoch": 2.606, + "grad_norm": 2.4105169773101807, + "learning_rate": 2.394e-05, + "loss": 0.0417, + "step": 13030 + }, + { + "epoch": 2.608, + "grad_norm": 0.003810339141637087, + "learning_rate": 2.392e-05, + "loss": 0.0004, + "step": 13040 + }, + { + "epoch": 2.61, + "grad_norm": 0.040436211973428726, + "learning_rate": 2.39e-05, + "loss": 0.0008, + "step": 13050 + }, + { + "epoch": 2.612, + "grad_norm": 0.01878192462027073, + "learning_rate": 2.3880000000000002e-05, + "loss": 0.075, + "step": 13060 + }, + { + "epoch": 2.614, + "grad_norm": 0.005005359649658203, + "learning_rate": 2.3860000000000002e-05, + "loss": 0.0587, + "step": 13070 + }, + { + "epoch": 2.616, + "grad_norm": 0.004173360764980316, + "learning_rate": 2.3840000000000002e-05, + "loss": 0.0152, + "step": 13080 + }, + { + "epoch": 2.618, + "grad_norm": 0.003978619817644358, + "learning_rate": 2.3820000000000002e-05, + "loss": 0.0693, + "step": 13090 + }, + { + "epoch": 2.62, + "grad_norm": 0.014284418895840645, + "learning_rate": 2.38e-05, + "loss": 0.0008, + "step": 13100 + }, + { + "epoch": 2.622, + "grad_norm": 0.012868880294263363, + "learning_rate": 2.3780000000000003e-05, + "loss": 0.0027, + "step": 13110 + }, + { + "epoch": 2.624, + "grad_norm": 0.021820498630404472, + "learning_rate": 2.3760000000000003e-05, + "loss": 0.0013, + "step": 13120 + }, + { + "epoch": 2.626, + "grad_norm": 0.020173553377389908, + "learning_rate": 2.374e-05, + "loss": 0.0088, + "step": 13130 + }, + { + "epoch": 2.628, + "grad_norm": 0.023448575288057327, + "learning_rate": 2.372e-05, + "loss": 0.0008, + "step": 13140 + }, + { + "epoch": 2.63, + "grad_norm": 0.012240973301231861, + "learning_rate": 2.37e-05, + "loss": 0.0108, + "step": 13150 + }, + { + "epoch": 2.632, + "grad_norm": 0.36354461312294006, + "learning_rate": 2.3680000000000004e-05, + "loss": 0.0447, + "step": 
13160 + }, + { + "epoch": 2.634, + "grad_norm": 0.6124907732009888, + "learning_rate": 2.366e-05, + "loss": 0.0096, + "step": 13170 + }, + { + "epoch": 2.636, + "grad_norm": 0.007227941881865263, + "learning_rate": 2.364e-05, + "loss": 0.0014, + "step": 13180 + }, + { + "epoch": 2.638, + "grad_norm": 0.0026091760955750942, + "learning_rate": 2.362e-05, + "loss": 0.0097, + "step": 13190 + }, + { + "epoch": 2.64, + "grad_norm": 0.004290744196623564, + "learning_rate": 2.36e-05, + "loss": 0.0008, + "step": 13200 + }, + { + "epoch": 2.642, + "grad_norm": 0.003058998379856348, + "learning_rate": 2.358e-05, + "loss": 0.0005, + "step": 13210 + }, + { + "epoch": 2.644, + "grad_norm": 0.00327027915045619, + "learning_rate": 2.356e-05, + "loss": 0.0437, + "step": 13220 + }, + { + "epoch": 2.646, + "grad_norm": 0.02532695233821869, + "learning_rate": 2.354e-05, + "loss": 0.0613, + "step": 13230 + }, + { + "epoch": 2.648, + "grad_norm": 2.3604378700256348, + "learning_rate": 2.3520000000000002e-05, + "loss": 0.0556, + "step": 13240 + }, + { + "epoch": 2.65, + "grad_norm": 8.695077896118164, + "learning_rate": 2.35e-05, + "loss": 0.0323, + "step": 13250 + }, + { + "epoch": 2.652, + "grad_norm": 0.009923191741108894, + "learning_rate": 2.3480000000000002e-05, + "loss": 0.001, + "step": 13260 + }, + { + "epoch": 2.654, + "grad_norm": 0.029513264074921608, + "learning_rate": 2.3460000000000002e-05, + "loss": 0.0006, + "step": 13270 + }, + { + "epoch": 2.656, + "grad_norm": 3.3521392345428467, + "learning_rate": 2.344e-05, + "loss": 0.0318, + "step": 13280 + }, + { + "epoch": 2.658, + "grad_norm": 0.2049526572227478, + "learning_rate": 2.342e-05, + "loss": 0.0025, + "step": 13290 + }, + { + "epoch": 2.66, + "grad_norm": 0.007929221726953983, + "learning_rate": 2.3400000000000003e-05, + "loss": 0.0016, + "step": 13300 + }, + { + "epoch": 2.662, + "grad_norm": 0.002996771829202771, + "learning_rate": 2.3380000000000003e-05, + "loss": 0.0028, + "step": 13310 + }, + { + "epoch": 2.664, + "grad_norm": 0.007703165523707867, + "learning_rate": 2.336e-05, + "loss": 0.0006, + "step": 13320 + }, + { + "epoch": 2.666, + "grad_norm": 0.003229381749406457, + "learning_rate": 2.334e-05, + "loss": 0.002, + "step": 13330 + }, + { + "epoch": 2.668, + "grad_norm": 0.014454679563641548, + "learning_rate": 2.332e-05, + "loss": 0.0713, + "step": 13340 + }, + { + "epoch": 2.67, + "grad_norm": 0.008571525104343891, + "learning_rate": 2.3300000000000004e-05, + "loss": 0.0019, + "step": 13350 + }, + { + "epoch": 2.672, + "grad_norm": 0.006012748461216688, + "learning_rate": 2.328e-05, + "loss": 0.0296, + "step": 13360 + }, + { + "epoch": 2.674, + "grad_norm": 0.004160825163125992, + "learning_rate": 2.326e-05, + "loss": 0.0336, + "step": 13370 + }, + { + "epoch": 2.676, + "grad_norm": 0.031649768352508545, + "learning_rate": 2.324e-05, + "loss": 0.019, + "step": 13380 + }, + { + "epoch": 2.678, + "grad_norm": 0.019651921465992928, + "learning_rate": 2.322e-05, + "loss": 0.0407, + "step": 13390 + }, + { + "epoch": 2.68, + "grad_norm": 0.028730370104312897, + "learning_rate": 2.32e-05, + "loss": 0.0735, + "step": 13400 + }, + { + "epoch": 2.682, + "grad_norm": 0.06920024007558823, + "learning_rate": 2.318e-05, + "loss": 0.0044, + "step": 13410 + }, + { + "epoch": 2.684, + "grad_norm": 0.03945730999112129, + "learning_rate": 2.3160000000000002e-05, + "loss": 0.0004, + "step": 13420 + }, + { + "epoch": 2.686, + "grad_norm": 0.01288557332009077, + "learning_rate": 2.3140000000000002e-05, + "loss": 0.0026, + "step": 13430 + }, + { + 
"epoch": 2.6879999999999997, + "grad_norm": 0.5390536189079285, + "learning_rate": 2.312e-05, + "loss": 0.0332, + "step": 13440 + }, + { + "epoch": 2.69, + "grad_norm": 0.006479345262050629, + "learning_rate": 2.3100000000000002e-05, + "loss": 0.0005, + "step": 13450 + }, + { + "epoch": 2.692, + "grad_norm": 0.008790848776698112, + "learning_rate": 2.3080000000000003e-05, + "loss": 0.0004, + "step": 13460 + }, + { + "epoch": 2.694, + "grad_norm": 0.004521473310887814, + "learning_rate": 2.306e-05, + "loss": 0.004, + "step": 13470 + }, + { + "epoch": 2.6959999999999997, + "grad_norm": 6.016846179962158, + "learning_rate": 2.304e-05, + "loss": 0.0207, + "step": 13480 + }, + { + "epoch": 2.698, + "grad_norm": 0.016385849565267563, + "learning_rate": 2.302e-05, + "loss": 0.0006, + "step": 13490 + }, + { + "epoch": 2.7, + "grad_norm": 0.0029000595677644014, + "learning_rate": 2.3000000000000003e-05, + "loss": 0.0246, + "step": 13500 + }, + { + "epoch": 2.702, + "grad_norm": 0.017534567043185234, + "learning_rate": 2.298e-05, + "loss": 0.0008, + "step": 13510 + }, + { + "epoch": 2.7039999999999997, + "grad_norm": 0.008091018535196781, + "learning_rate": 2.296e-05, + "loss": 0.0006, + "step": 13520 + }, + { + "epoch": 2.706, + "grad_norm": 0.008187901228666306, + "learning_rate": 2.294e-05, + "loss": 0.0005, + "step": 13530 + }, + { + "epoch": 2.708, + "grad_norm": 0.004407752305269241, + "learning_rate": 2.292e-05, + "loss": 0.0228, + "step": 13540 + }, + { + "epoch": 2.71, + "grad_norm": 0.03731589391827583, + "learning_rate": 2.29e-05, + "loss": 0.0063, + "step": 13550 + }, + { + "epoch": 2.7119999999999997, + "grad_norm": 0.005584000609815121, + "learning_rate": 2.288e-05, + "loss": 0.0008, + "step": 13560 + }, + { + "epoch": 2.714, + "grad_norm": 0.03260991722345352, + "learning_rate": 2.286e-05, + "loss": 0.0006, + "step": 13570 + }, + { + "epoch": 2.716, + "grad_norm": 0.006006445270031691, + "learning_rate": 2.284e-05, + "loss": 0.0337, + "step": 13580 + }, + { + "epoch": 2.718, + "grad_norm": 0.003870969172567129, + "learning_rate": 2.282e-05, + "loss": 0.0005, + "step": 13590 + }, + { + "epoch": 2.7199999999999998, + "grad_norm": 0.013978478498756886, + "learning_rate": 2.2800000000000002e-05, + "loss": 0.001, + "step": 13600 + }, + { + "epoch": 2.722, + "grad_norm": 0.003443440655246377, + "learning_rate": 2.2780000000000002e-05, + "loss": 0.0763, + "step": 13610 + }, + { + "epoch": 2.724, + "grad_norm": 0.0038477852940559387, + "learning_rate": 2.2760000000000002e-05, + "loss": 0.0007, + "step": 13620 + }, + { + "epoch": 2.726, + "grad_norm": 0.002752868691459298, + "learning_rate": 2.274e-05, + "loss": 0.0466, + "step": 13630 + }, + { + "epoch": 2.7279999999999998, + "grad_norm": 0.010040170513093472, + "learning_rate": 2.2720000000000003e-05, + "loss": 0.0022, + "step": 13640 + }, + { + "epoch": 2.73, + "grad_norm": 2.712369203567505, + "learning_rate": 2.2700000000000003e-05, + "loss": 0.0411, + "step": 13650 + }, + { + "epoch": 2.732, + "grad_norm": 0.018802713602781296, + "learning_rate": 2.268e-05, + "loss": 0.0206, + "step": 13660 + }, + { + "epoch": 2.734, + "grad_norm": 0.04902869462966919, + "learning_rate": 2.266e-05, + "loss": 0.0009, + "step": 13670 + }, + { + "epoch": 2.7359999999999998, + "grad_norm": 0.0029866821132600307, + "learning_rate": 2.264e-05, + "loss": 0.0255, + "step": 13680 + }, + { + "epoch": 2.738, + "grad_norm": 0.004088583867996931, + "learning_rate": 2.2620000000000004e-05, + "loss": 0.0288, + "step": 13690 + }, + { + "epoch": 2.74, + "grad_norm": 
0.04366866126656532, + "learning_rate": 2.26e-05, + "loss": 0.0027, + "step": 13700 + }, + { + "epoch": 2.742, + "grad_norm": 0.6947624683380127, + "learning_rate": 2.258e-05, + "loss": 0.0009, + "step": 13710 + }, + { + "epoch": 2.7439999999999998, + "grad_norm": 0.011672383174300194, + "learning_rate": 2.256e-05, + "loss": 0.06, + "step": 13720 + }, + { + "epoch": 2.746, + "grad_norm": 0.0022892083507031202, + "learning_rate": 2.254e-05, + "loss": 0.0385, + "step": 13730 + }, + { + "epoch": 2.748, + "grad_norm": 0.014559238217771053, + "learning_rate": 2.252e-05, + "loss": 0.001, + "step": 13740 + }, + { + "epoch": 2.75, + "grad_norm": 0.0139195891097188, + "learning_rate": 2.25e-05, + "loss": 0.0528, + "step": 13750 + }, + { + "epoch": 2.752, + "grad_norm": 0.011245626956224442, + "learning_rate": 2.248e-05, + "loss": 0.0125, + "step": 13760 + }, + { + "epoch": 2.754, + "grad_norm": 0.009724706411361694, + "learning_rate": 2.2460000000000002e-05, + "loss": 0.0006, + "step": 13770 + }, + { + "epoch": 2.7560000000000002, + "grad_norm": 0.017409011721611023, + "learning_rate": 2.244e-05, + "loss": 0.018, + "step": 13780 + }, + { + "epoch": 2.758, + "grad_norm": 0.03546595573425293, + "learning_rate": 2.2420000000000002e-05, + "loss": 0.0517, + "step": 13790 + }, + { + "epoch": 2.76, + "grad_norm": 0.004322000779211521, + "learning_rate": 2.2400000000000002e-05, + "loss": 0.0185, + "step": 13800 + }, + { + "epoch": 2.762, + "grad_norm": 0.013103820383548737, + "learning_rate": 2.2380000000000003e-05, + "loss": 0.0113, + "step": 13810 + }, + { + "epoch": 2.7640000000000002, + "grad_norm": 0.008694900199770927, + "learning_rate": 2.236e-05, + "loss": 0.0322, + "step": 13820 + }, + { + "epoch": 2.766, + "grad_norm": 0.026327064260840416, + "learning_rate": 2.234e-05, + "loss": 0.0014, + "step": 13830 + }, + { + "epoch": 2.768, + "grad_norm": 0.025663388893008232, + "learning_rate": 2.2320000000000003e-05, + "loss": 0.0107, + "step": 13840 + }, + { + "epoch": 2.77, + "grad_norm": 0.0036214159335941076, + "learning_rate": 2.23e-05, + "loss": 0.0395, + "step": 13850 + }, + { + "epoch": 2.7720000000000002, + "grad_norm": 0.002212496241554618, + "learning_rate": 2.228e-05, + "loss": 0.0008, + "step": 13860 + }, + { + "epoch": 2.774, + "grad_norm": 0.007965583354234695, + "learning_rate": 2.226e-05, + "loss": 0.0433, + "step": 13870 + }, + { + "epoch": 2.776, + "grad_norm": 0.006145271472632885, + "learning_rate": 2.224e-05, + "loss": 0.0205, + "step": 13880 + }, + { + "epoch": 2.778, + "grad_norm": 0.004019021987915039, + "learning_rate": 2.222e-05, + "loss": 0.0005, + "step": 13890 + }, + { + "epoch": 2.7800000000000002, + "grad_norm": 0.013389227911829948, + "learning_rate": 2.22e-05, + "loss": 0.0006, + "step": 13900 + }, + { + "epoch": 2.782, + "grad_norm": 0.009832042269408703, + "learning_rate": 2.218e-05, + "loss": 0.0021, + "step": 13910 + }, + { + "epoch": 2.784, + "grad_norm": 0.0037811503279954195, + "learning_rate": 2.216e-05, + "loss": 0.0152, + "step": 13920 + }, + { + "epoch": 2.786, + "grad_norm": 0.00249070068821311, + "learning_rate": 2.214e-05, + "loss": 0.0013, + "step": 13930 + }, + { + "epoch": 2.7880000000000003, + "grad_norm": 0.008035962469875813, + "learning_rate": 2.212e-05, + "loss": 0.0216, + "step": 13940 + }, + { + "epoch": 2.79, + "grad_norm": 0.003760936204344034, + "learning_rate": 2.2100000000000002e-05, + "loss": 0.0454, + "step": 13950 + }, + { + "epoch": 2.792, + "grad_norm": 0.15059198439121246, + "learning_rate": 2.2080000000000002e-05, + "loss": 0.0389, + 
"step": 13960 + }, + { + "epoch": 2.794, + "grad_norm": 0.00915328599512577, + "learning_rate": 2.206e-05, + "loss": 0.0007, + "step": 13970 + }, + { + "epoch": 2.7960000000000003, + "grad_norm": 0.019227130338549614, + "learning_rate": 2.2040000000000002e-05, + "loss": 0.0225, + "step": 13980 + }, + { + "epoch": 2.798, + "grad_norm": 0.0020889227744191885, + "learning_rate": 2.2020000000000003e-05, + "loss": 0.0029, + "step": 13990 + }, + { + "epoch": 2.8, + "grad_norm": 0.022324854508042336, + "learning_rate": 2.2000000000000003e-05, + "loss": 0.0008, + "step": 14000 + }, + { + "epoch": 2.802, + "grad_norm": 0.010531187057495117, + "learning_rate": 2.198e-05, + "loss": 0.0039, + "step": 14010 + }, + { + "epoch": 2.8040000000000003, + "grad_norm": 0.10176002234220505, + "learning_rate": 2.196e-05, + "loss": 0.0275, + "step": 14020 + }, + { + "epoch": 2.806, + "grad_norm": 0.013241732493042946, + "learning_rate": 2.1940000000000003e-05, + "loss": 0.0005, + "step": 14030 + }, + { + "epoch": 2.808, + "grad_norm": 0.009389587678015232, + "learning_rate": 2.192e-05, + "loss": 0.0387, + "step": 14040 + }, + { + "epoch": 2.81, + "grad_norm": 0.003362020943313837, + "learning_rate": 2.19e-05, + "loss": 0.0007, + "step": 14050 + }, + { + "epoch": 2.8120000000000003, + "grad_norm": 0.0029993215575814247, + "learning_rate": 2.188e-05, + "loss": 0.0324, + "step": 14060 + }, + { + "epoch": 2.814, + "grad_norm": 0.04549940302968025, + "learning_rate": 2.186e-05, + "loss": 0.0656, + "step": 14070 + }, + { + "epoch": 2.816, + "grad_norm": 9.865263938903809, + "learning_rate": 2.184e-05, + "loss": 0.0314, + "step": 14080 + }, + { + "epoch": 2.818, + "grad_norm": 0.007688828743994236, + "learning_rate": 2.182e-05, + "loss": 0.0312, + "step": 14090 + }, + { + "epoch": 2.82, + "grad_norm": 0.01804133504629135, + "learning_rate": 2.18e-05, + "loss": 0.0003, + "step": 14100 + }, + { + "epoch": 2.822, + "grad_norm": 0.004246327094733715, + "learning_rate": 2.178e-05, + "loss": 0.021, + "step": 14110 + }, + { + "epoch": 2.824, + "grad_norm": 0.01003707479685545, + "learning_rate": 2.176e-05, + "loss": 0.0014, + "step": 14120 + }, + { + "epoch": 2.826, + "grad_norm": 0.015356244519352913, + "learning_rate": 2.1740000000000002e-05, + "loss": 0.0007, + "step": 14130 + }, + { + "epoch": 2.828, + "grad_norm": 0.0139754144474864, + "learning_rate": 2.1720000000000002e-05, + "loss": 0.0004, + "step": 14140 + }, + { + "epoch": 2.83, + "grad_norm": 0.1567094773054123, + "learning_rate": 2.1700000000000002e-05, + "loss": 0.001, + "step": 14150 + }, + { + "epoch": 2.832, + "grad_norm": 0.012544950470328331, + "learning_rate": 2.168e-05, + "loss": 0.0147, + "step": 14160 + }, + { + "epoch": 2.834, + "grad_norm": 0.005918503273278475, + "learning_rate": 2.166e-05, + "loss": 0.0014, + "step": 14170 + }, + { + "epoch": 2.836, + "grad_norm": 4.1460065841674805, + "learning_rate": 2.1640000000000003e-05, + "loss": 0.0416, + "step": 14180 + }, + { + "epoch": 2.838, + "grad_norm": 0.0017820524517446756, + "learning_rate": 2.162e-05, + "loss": 0.0352, + "step": 14190 + }, + { + "epoch": 2.84, + "grad_norm": 0.01237359270453453, + "learning_rate": 2.16e-05, + "loss": 0.0016, + "step": 14200 + }, + { + "epoch": 2.842, + "grad_norm": 0.002615107223391533, + "learning_rate": 2.158e-05, + "loss": 0.0083, + "step": 14210 + }, + { + "epoch": 2.844, + "grad_norm": 0.006978666875511408, + "learning_rate": 2.1560000000000004e-05, + "loss": 0.0426, + "step": 14220 + }, + { + "epoch": 2.846, + "grad_norm": 0.0061668511480093, + 
"learning_rate": 2.154e-05, + "loss": 0.0171, + "step": 14230 + }, + { + "epoch": 2.848, + "grad_norm": 0.004591886419802904, + "learning_rate": 2.152e-05, + "loss": 0.0529, + "step": 14240 + }, + { + "epoch": 2.85, + "grad_norm": 0.0053439270704984665, + "learning_rate": 2.15e-05, + "loss": 0.0039, + "step": 14250 + }, + { + "epoch": 2.852, + "grad_norm": 0.005400092341005802, + "learning_rate": 2.148e-05, + "loss": 0.0006, + "step": 14260 + }, + { + "epoch": 2.854, + "grad_norm": 0.011323225684463978, + "learning_rate": 2.146e-05, + "loss": 0.0053, + "step": 14270 + }, + { + "epoch": 2.856, + "grad_norm": 9.84250545501709, + "learning_rate": 2.144e-05, + "loss": 0.0966, + "step": 14280 + }, + { + "epoch": 2.858, + "grad_norm": 0.013491962105035782, + "learning_rate": 2.142e-05, + "loss": 0.0007, + "step": 14290 + }, + { + "epoch": 2.86, + "grad_norm": 0.003985918127000332, + "learning_rate": 2.1400000000000002e-05, + "loss": 0.015, + "step": 14300 + }, + { + "epoch": 2.862, + "grad_norm": 3.3448140621185303, + "learning_rate": 2.138e-05, + "loss": 0.0021, + "step": 14310 + }, + { + "epoch": 2.864, + "grad_norm": 0.02373521216213703, + "learning_rate": 2.1360000000000002e-05, + "loss": 0.0245, + "step": 14320 + }, + { + "epoch": 2.866, + "grad_norm": 0.029405981302261353, + "learning_rate": 2.1340000000000002e-05, + "loss": 0.0061, + "step": 14330 + }, + { + "epoch": 2.868, + "grad_norm": 0.007544131018221378, + "learning_rate": 2.1320000000000003e-05, + "loss": 0.0445, + "step": 14340 + }, + { + "epoch": 2.87, + "grad_norm": 0.035514336079359055, + "learning_rate": 2.13e-05, + "loss": 0.0183, + "step": 14350 + }, + { + "epoch": 2.872, + "grad_norm": 0.0044698393903672695, + "learning_rate": 2.128e-05, + "loss": 0.0425, + "step": 14360 + }, + { + "epoch": 2.874, + "grad_norm": 0.14416873455047607, + "learning_rate": 2.1260000000000003e-05, + "loss": 0.0067, + "step": 14370 + }, + { + "epoch": 2.876, + "grad_norm": 4.21664571762085, + "learning_rate": 2.124e-05, + "loss": 0.0773, + "step": 14380 + }, + { + "epoch": 2.878, + "grad_norm": 5.436165809631348, + "learning_rate": 2.122e-05, + "loss": 0.041, + "step": 14390 + }, + { + "epoch": 2.88, + "grad_norm": 0.02734544686973095, + "learning_rate": 2.12e-05, + "loss": 0.0032, + "step": 14400 + }, + { + "epoch": 2.882, + "grad_norm": 0.0023346785455942154, + "learning_rate": 2.118e-05, + "loss": 0.0655, + "step": 14410 + }, + { + "epoch": 2.884, + "grad_norm": 0.006281679030507803, + "learning_rate": 2.116e-05, + "loss": 0.0142, + "step": 14420 + }, + { + "epoch": 2.886, + "grad_norm": 0.024367235600948334, + "learning_rate": 2.114e-05, + "loss": 0.0011, + "step": 14430 + }, + { + "epoch": 2.888, + "grad_norm": 0.00849449634552002, + "learning_rate": 2.112e-05, + "loss": 0.0981, + "step": 14440 + }, + { + "epoch": 2.89, + "grad_norm": 0.021741211414337158, + "learning_rate": 2.11e-05, + "loss": 0.0032, + "step": 14450 + }, + { + "epoch": 2.892, + "grad_norm": 0.0908471941947937, + "learning_rate": 2.1079999999999998e-05, + "loss": 0.006, + "step": 14460 + }, + { + "epoch": 2.894, + "grad_norm": 0.009153512306511402, + "learning_rate": 2.106e-05, + "loss": 0.0031, + "step": 14470 + }, + { + "epoch": 2.896, + "grad_norm": 0.0878956988453865, + "learning_rate": 2.1040000000000002e-05, + "loss": 0.0023, + "step": 14480 + }, + { + "epoch": 2.898, + "grad_norm": 0.22682340443134308, + "learning_rate": 2.1020000000000002e-05, + "loss": 0.0026, + "step": 14490 + }, + { + "epoch": 2.9, + "grad_norm": 0.010414546355605125, + "learning_rate": 2.1e-05, + 
"loss": 0.0239, + "step": 14500 + }, + { + "epoch": 2.902, + "grad_norm": 1.4128164052963257, + "learning_rate": 2.098e-05, + "loss": 0.0017, + "step": 14510 + }, + { + "epoch": 2.904, + "grad_norm": 0.1310981959104538, + "learning_rate": 2.0960000000000003e-05, + "loss": 0.0339, + "step": 14520 + }, + { + "epoch": 2.906, + "grad_norm": 0.0020759692415595055, + "learning_rate": 2.0940000000000003e-05, + "loss": 0.001, + "step": 14530 + }, + { + "epoch": 2.908, + "grad_norm": 6.847393989562988, + "learning_rate": 2.092e-05, + "loss": 0.0242, + "step": 14540 + }, + { + "epoch": 2.91, + "grad_norm": 0.002632703399285674, + "learning_rate": 2.09e-05, + "loss": 0.0043, + "step": 14550 + }, + { + "epoch": 2.912, + "grad_norm": 0.12055040150880814, + "learning_rate": 2.0880000000000003e-05, + "loss": 0.0106, + "step": 14560 + }, + { + "epoch": 2.914, + "grad_norm": 0.008990177884697914, + "learning_rate": 2.086e-05, + "loss": 0.0003, + "step": 14570 + }, + { + "epoch": 2.916, + "grad_norm": 0.005368634592741728, + "learning_rate": 2.084e-05, + "loss": 0.032, + "step": 14580 + }, + { + "epoch": 2.918, + "grad_norm": 0.003290928667411208, + "learning_rate": 2.082e-05, + "loss": 0.0009, + "step": 14590 + }, + { + "epoch": 2.92, + "grad_norm": 0.0031266543082892895, + "learning_rate": 2.08e-05, + "loss": 0.0061, + "step": 14600 + }, + { + "epoch": 2.922, + "grad_norm": 0.020411498844623566, + "learning_rate": 2.078e-05, + "loss": 0.0033, + "step": 14610 + }, + { + "epoch": 2.924, + "grad_norm": 0.01999702863395214, + "learning_rate": 2.076e-05, + "loss": 0.0025, + "step": 14620 + }, + { + "epoch": 2.926, + "grad_norm": 2.3530824184417725, + "learning_rate": 2.074e-05, + "loss": 0.0325, + "step": 14630 + }, + { + "epoch": 2.928, + "grad_norm": 0.0432647280395031, + "learning_rate": 2.072e-05, + "loss": 0.0173, + "step": 14640 + }, + { + "epoch": 2.93, + "grad_norm": 0.03774017095565796, + "learning_rate": 2.07e-05, + "loss": 0.0638, + "step": 14650 + }, + { + "epoch": 2.932, + "grad_norm": 0.023275645449757576, + "learning_rate": 2.0680000000000002e-05, + "loss": 0.0008, + "step": 14660 + }, + { + "epoch": 2.934, + "grad_norm": 0.0035587181337177753, + "learning_rate": 2.0660000000000002e-05, + "loss": 0.0615, + "step": 14670 + }, + { + "epoch": 2.936, + "grad_norm": 0.012016230262815952, + "learning_rate": 2.0640000000000002e-05, + "loss": 0.0012, + "step": 14680 + }, + { + "epoch": 2.9379999999999997, + "grad_norm": 0.0014051154721528292, + "learning_rate": 2.062e-05, + "loss": 0.0021, + "step": 14690 + }, + { + "epoch": 2.94, + "grad_norm": 0.002413893351331353, + "learning_rate": 2.06e-05, + "loss": 0.0089, + "step": 14700 + }, + { + "epoch": 2.942, + "grad_norm": 0.002096255775541067, + "learning_rate": 2.0580000000000003e-05, + "loss": 0.0213, + "step": 14710 + }, + { + "epoch": 2.944, + "grad_norm": 0.1285342127084732, + "learning_rate": 2.0560000000000003e-05, + "loss": 0.0377, + "step": 14720 + }, + { + "epoch": 2.9459999999999997, + "grad_norm": 0.01958276331424713, + "learning_rate": 2.054e-05, + "loss": 0.001, + "step": 14730 + }, + { + "epoch": 2.948, + "grad_norm": 0.03653479367494583, + "learning_rate": 2.052e-05, + "loss": 0.0005, + "step": 14740 + }, + { + "epoch": 2.95, + "grad_norm": 0.009381549432873726, + "learning_rate": 2.05e-05, + "loss": 0.0014, + "step": 14750 + }, + { + "epoch": 2.952, + "grad_norm": 0.03101208433508873, + "learning_rate": 2.048e-05, + "loss": 0.0331, + "step": 14760 + }, + { + "epoch": 2.9539999999999997, + "grad_norm": 0.01349340658634901, + 
"learning_rate": 2.046e-05, + "loss": 0.0417, + "step": 14770 + }, + { + "epoch": 2.956, + "grad_norm": 0.0013646255247294903, + "learning_rate": 2.044e-05, + "loss": 0.0406, + "step": 14780 + }, + { + "epoch": 2.958, + "grad_norm": 0.0011694016866385937, + "learning_rate": 2.042e-05, + "loss": 0.0306, + "step": 14790 + }, + { + "epoch": 2.96, + "grad_norm": 0.5294227004051208, + "learning_rate": 2.04e-05, + "loss": 0.0279, + "step": 14800 + }, + { + "epoch": 2.9619999999999997, + "grad_norm": 0.006853798869997263, + "learning_rate": 2.038e-05, + "loss": 0.0045, + "step": 14810 + }, + { + "epoch": 2.964, + "grad_norm": 0.0035395545419305563, + "learning_rate": 2.036e-05, + "loss": 0.0244, + "step": 14820 + }, + { + "epoch": 2.966, + "grad_norm": 0.0022441355977207422, + "learning_rate": 2.0340000000000002e-05, + "loss": 0.0012, + "step": 14830 + }, + { + "epoch": 2.968, + "grad_norm": 0.016829049214720726, + "learning_rate": 2.032e-05, + "loss": 0.0013, + "step": 14840 + }, + { + "epoch": 2.9699999999999998, + "grad_norm": 0.11161936819553375, + "learning_rate": 2.0300000000000002e-05, + "loss": 0.0435, + "step": 14850 + }, + { + "epoch": 2.972, + "grad_norm": 0.006620103493332863, + "learning_rate": 2.0280000000000002e-05, + "loss": 0.0388, + "step": 14860 + }, + { + "epoch": 2.974, + "grad_norm": 0.27429601550102234, + "learning_rate": 2.0260000000000003e-05, + "loss": 0.001, + "step": 14870 + }, + { + "epoch": 2.976, + "grad_norm": 0.01480827946215868, + "learning_rate": 2.024e-05, + "loss": 0.0008, + "step": 14880 + }, + { + "epoch": 2.9779999999999998, + "grad_norm": 0.012657509185373783, + "learning_rate": 2.022e-05, + "loss": 0.0022, + "step": 14890 + }, + { + "epoch": 2.98, + "grad_norm": 0.00810237880796194, + "learning_rate": 2.0200000000000003e-05, + "loss": 0.0025, + "step": 14900 + }, + { + "epoch": 2.982, + "grad_norm": 0.0014344545779749751, + "learning_rate": 2.0180000000000003e-05, + "loss": 0.0162, + "step": 14910 + }, + { + "epoch": 2.984, + "grad_norm": 0.004025655798614025, + "learning_rate": 2.016e-05, + "loss": 0.0004, + "step": 14920 + }, + { + "epoch": 2.9859999999999998, + "grad_norm": 0.004194976761937141, + "learning_rate": 2.014e-05, + "loss": 0.0006, + "step": 14930 + }, + { + "epoch": 2.988, + "grad_norm": 0.009011647664010525, + "learning_rate": 2.012e-05, + "loss": 0.0558, + "step": 14940 + }, + { + "epoch": 2.99, + "grad_norm": 0.003613577224314213, + "learning_rate": 2.01e-05, + "loss": 0.0033, + "step": 14950 + }, + { + "epoch": 2.992, + "grad_norm": 0.016397807747125626, + "learning_rate": 2.008e-05, + "loss": 0.0066, + "step": 14960 + }, + { + "epoch": 2.9939999999999998, + "grad_norm": 0.006938213016837835, + "learning_rate": 2.006e-05, + "loss": 0.003, + "step": 14970 + }, + { + "epoch": 2.996, + "grad_norm": 0.0022056391462683678, + "learning_rate": 2.004e-05, + "loss": 0.0008, + "step": 14980 + }, + { + "epoch": 2.998, + "grad_norm": 0.1262286901473999, + "learning_rate": 2.002e-05, + "loss": 0.0048, + "step": 14990 + }, + { + "epoch": 3.0, + "grad_norm": 1.3995038270950317, + "learning_rate": 2e-05, + "loss": 0.0034, + "step": 15000 + }, + { + "epoch": 3.0, + "eval_loss": 0.01657683774828911, + "eval_runtime": 95.2432, + "eval_samples_per_second": 104.994, + "eval_steps_per_second": 13.124, + "step": 15000 + }, + { + "epoch": 3.002, + "grad_norm": 3.7540829181671143, + "learning_rate": 1.9980000000000002e-05, + "loss": 0.0197, + "step": 15010 + }, + { + "epoch": 3.004, + "grad_norm": 0.0008035671780817211, + "learning_rate": 
1.9960000000000002e-05, + "loss": 0.0389, + "step": 15020 + }, + { + "epoch": 3.006, + "grad_norm": 0.006811047438532114, + "learning_rate": 1.994e-05, + "loss": 0.0407, + "step": 15030 + }, + { + "epoch": 3.008, + "grad_norm": 0.000698752177413553, + "learning_rate": 1.992e-05, + "loss": 0.0006, + "step": 15040 + }, + { + "epoch": 3.01, + "grad_norm": 0.012239865958690643, + "learning_rate": 1.9900000000000003e-05, + "loss": 0.0213, + "step": 15050 + }, + { + "epoch": 3.012, + "grad_norm": 0.0010888595134019852, + "learning_rate": 1.9880000000000003e-05, + "loss": 0.0509, + "step": 15060 + }, + { + "epoch": 3.014, + "grad_norm": 5.646819114685059, + "learning_rate": 1.986e-05, + "loss": 0.0442, + "step": 15070 + }, + { + "epoch": 3.016, + "grad_norm": 0.3257085978984833, + "learning_rate": 1.984e-05, + "loss": 0.0031, + "step": 15080 + }, + { + "epoch": 3.018, + "grad_norm": 0.166681170463562, + "learning_rate": 1.982e-05, + "loss": 0.0015, + "step": 15090 + }, + { + "epoch": 3.02, + "grad_norm": 0.010647795163094997, + "learning_rate": 1.9800000000000004e-05, + "loss": 0.0003, + "step": 15100 + }, + { + "epoch": 3.022, + "grad_norm": 10.128013610839844, + "learning_rate": 1.978e-05, + "loss": 0.0077, + "step": 15110 + }, + { + "epoch": 3.024, + "grad_norm": 0.005560045130550861, + "learning_rate": 1.976e-05, + "loss": 0.0423, + "step": 15120 + }, + { + "epoch": 3.026, + "grad_norm": 0.01081987377256155, + "learning_rate": 1.974e-05, + "loss": 0.0735, + "step": 15130 + }, + { + "epoch": 3.028, + "grad_norm": 0.0014237303985282779, + "learning_rate": 1.972e-05, + "loss": 0.0004, + "step": 15140 + }, + { + "epoch": 3.03, + "grad_norm": 4.161525726318359, + "learning_rate": 1.97e-05, + "loss": 0.0254, + "step": 15150 + }, + { + "epoch": 3.032, + "grad_norm": 4.0388641357421875, + "learning_rate": 1.968e-05, + "loss": 0.006, + "step": 15160 + }, + { + "epoch": 3.034, + "grad_norm": 0.008463465608656406, + "learning_rate": 1.966e-05, + "loss": 0.0144, + "step": 15170 + }, + { + "epoch": 3.036, + "grad_norm": 0.006266901269555092, + "learning_rate": 1.9640000000000002e-05, + "loss": 0.0045, + "step": 15180 + }, + { + "epoch": 3.038, + "grad_norm": 0.020866410806775093, + "learning_rate": 1.9620000000000002e-05, + "loss": 0.051, + "step": 15190 + }, + { + "epoch": 3.04, + "grad_norm": 0.06101961433887482, + "learning_rate": 1.9600000000000002e-05, + "loss": 0.0082, + "step": 15200 + }, + { + "epoch": 3.042, + "grad_norm": 0.0015061348676681519, + "learning_rate": 1.9580000000000002e-05, + "loss": 0.0237, + "step": 15210 + }, + { + "epoch": 3.044, + "grad_norm": 0.004363874439150095, + "learning_rate": 1.956e-05, + "loss": 0.0161, + "step": 15220 + }, + { + "epoch": 3.046, + "grad_norm": 0.003860832192003727, + "learning_rate": 1.954e-05, + "loss": 0.0452, + "step": 15230 + }, + { + "epoch": 3.048, + "grad_norm": 0.0023255248088389635, + "learning_rate": 1.9520000000000003e-05, + "loss": 0.002, + "step": 15240 + }, + { + "epoch": 3.05, + "grad_norm": 2.2219414710998535, + "learning_rate": 1.9500000000000003e-05, + "loss": 0.003, + "step": 15250 + }, + { + "epoch": 3.052, + "grad_norm": 0.13828614354133606, + "learning_rate": 1.948e-05, + "loss": 0.0455, + "step": 15260 + }, + { + "epoch": 3.054, + "grad_norm": 0.003867669263854623, + "learning_rate": 1.946e-05, + "loss": 0.0004, + "step": 15270 + }, + { + "epoch": 3.056, + "grad_norm": 0.007381816394627094, + "learning_rate": 1.944e-05, + "loss": 0.0005, + "step": 15280 + }, + { + "epoch": 3.058, + "grad_norm": 0.008525327779352665, + 
"learning_rate": 1.942e-05, + "loss": 0.0427, + "step": 15290 + }, + { + "epoch": 3.06, + "grad_norm": 0.03834341838955879, + "learning_rate": 1.94e-05, + "loss": 0.0023, + "step": 15300 + }, + { + "epoch": 3.062, + "grad_norm": 0.10945117473602295, + "learning_rate": 1.938e-05, + "loss": 0.0174, + "step": 15310 + }, + { + "epoch": 3.064, + "grad_norm": 0.011007886379957199, + "learning_rate": 1.936e-05, + "loss": 0.0007, + "step": 15320 + }, + { + "epoch": 3.066, + "grad_norm": 0.006719260010868311, + "learning_rate": 1.934e-05, + "loss": 0.0003, + "step": 15330 + }, + { + "epoch": 3.068, + "grad_norm": 0.0017096955562010407, + "learning_rate": 1.932e-05, + "loss": 0.0163, + "step": 15340 + }, + { + "epoch": 3.07, + "grad_norm": 0.00444265129044652, + "learning_rate": 1.93e-05, + "loss": 0.0008, + "step": 15350 + }, + { + "epoch": 3.072, + "grad_norm": 0.0027511902153491974, + "learning_rate": 1.9280000000000002e-05, + "loss": 0.0048, + "step": 15360 + }, + { + "epoch": 3.074, + "grad_norm": 0.08696744590997696, + "learning_rate": 1.9260000000000002e-05, + "loss": 0.0007, + "step": 15370 + }, + { + "epoch": 3.076, + "grad_norm": 0.010581353679299355, + "learning_rate": 1.924e-05, + "loss": 0.0006, + "step": 15380 + }, + { + "epoch": 3.078, + "grad_norm": 6.367326736450195, + "learning_rate": 1.9220000000000002e-05, + "loss": 0.0151, + "step": 15390 + }, + { + "epoch": 3.08, + "grad_norm": 0.038151416927576065, + "learning_rate": 1.9200000000000003e-05, + "loss": 0.0007, + "step": 15400 + }, + { + "epoch": 3.082, + "grad_norm": 0.0035086292773485184, + "learning_rate": 1.918e-05, + "loss": 0.0039, + "step": 15410 + }, + { + "epoch": 3.084, + "grad_norm": 0.006965567357838154, + "learning_rate": 1.916e-05, + "loss": 0.0107, + "step": 15420 + }, + { + "epoch": 3.086, + "grad_norm": 0.006669293623417616, + "learning_rate": 1.914e-05, + "loss": 0.0393, + "step": 15430 + }, + { + "epoch": 3.088, + "grad_norm": 0.015491176396608353, + "learning_rate": 1.9120000000000003e-05, + "loss": 0.0008, + "step": 15440 + }, + { + "epoch": 3.09, + "grad_norm": 0.0049215275794267654, + "learning_rate": 1.91e-05, + "loss": 0.0156, + "step": 15450 + }, + { + "epoch": 3.092, + "grad_norm": 0.006124439183622599, + "learning_rate": 1.908e-05, + "loss": 0.0012, + "step": 15460 + }, + { + "epoch": 3.094, + "grad_norm": 0.00404756236821413, + "learning_rate": 1.906e-05, + "loss": 0.0004, + "step": 15470 + }, + { + "epoch": 3.096, + "grad_norm": 0.0698126032948494, + "learning_rate": 1.904e-05, + "loss": 0.0005, + "step": 15480 + }, + { + "epoch": 3.098, + "grad_norm": 0.2247430384159088, + "learning_rate": 1.902e-05, + "loss": 0.0006, + "step": 15490 + }, + { + "epoch": 3.1, + "grad_norm": 0.028489235788583755, + "learning_rate": 1.9e-05, + "loss": 0.0127, + "step": 15500 + }, + { + "epoch": 3.102, + "grad_norm": 0.0063696675933897495, + "learning_rate": 1.898e-05, + "loss": 0.0007, + "step": 15510 + }, + { + "epoch": 3.104, + "grad_norm": 0.0047574895434081554, + "learning_rate": 1.896e-05, + "loss": 0.0002, + "step": 15520 + }, + { + "epoch": 3.106, + "grad_norm": 0.05734870955348015, + "learning_rate": 1.894e-05, + "loss": 0.0011, + "step": 15530 + }, + { + "epoch": 3.108, + "grad_norm": 0.0044009024277329445, + "learning_rate": 1.8920000000000002e-05, + "loss": 0.0823, + "step": 15540 + }, + { + "epoch": 3.11, + "grad_norm": 0.002054518321529031, + "learning_rate": 1.8900000000000002e-05, + "loss": 0.0005, + "step": 15550 + }, + { + "epoch": 3.112, + "grad_norm": 0.017458245158195496, + "learning_rate": 
1.888e-05, + "loss": 0.001, + "step": 15560 + }, + { + "epoch": 3.114, + "grad_norm": 0.008351721800863743, + "learning_rate": 1.886e-05, + "loss": 0.0024, + "step": 15570 + }, + { + "epoch": 3.116, + "grad_norm": 0.002828374970704317, + "learning_rate": 1.8840000000000003e-05, + "loss": 0.0004, + "step": 15580 + }, + { + "epoch": 3.118, + "grad_norm": 0.0023917988874018192, + "learning_rate": 1.8820000000000003e-05, + "loss": 0.0003, + "step": 15590 + }, + { + "epoch": 3.12, + "grad_norm": 0.02163778804242611, + "learning_rate": 1.88e-05, + "loss": 0.0456, + "step": 15600 + }, + { + "epoch": 3.122, + "grad_norm": 0.0021023666486144066, + "learning_rate": 1.878e-05, + "loss": 0.0014, + "step": 15610 + }, + { + "epoch": 3.124, + "grad_norm": 0.006280581932514906, + "learning_rate": 1.876e-05, + "loss": 0.0151, + "step": 15620 + }, + { + "epoch": 3.126, + "grad_norm": 0.23775038123130798, + "learning_rate": 1.8740000000000004e-05, + "loss": 0.0005, + "step": 15630 + }, + { + "epoch": 3.128, + "grad_norm": 0.7629896402359009, + "learning_rate": 1.872e-05, + "loss": 0.0009, + "step": 15640 + }, + { + "epoch": 3.13, + "grad_norm": 0.07593518495559692, + "learning_rate": 1.87e-05, + "loss": 0.0013, + "step": 15650 + }, + { + "epoch": 3.132, + "grad_norm": 0.0027242007199674845, + "learning_rate": 1.868e-05, + "loss": 0.0936, + "step": 15660 + }, + { + "epoch": 3.134, + "grad_norm": 0.00594953540712595, + "learning_rate": 1.866e-05, + "loss": 0.0412, + "step": 15670 + }, + { + "epoch": 3.136, + "grad_norm": 0.007625892758369446, + "learning_rate": 1.864e-05, + "loss": 0.0003, + "step": 15680 + }, + { + "epoch": 3.138, + "grad_norm": 0.003998244646936655, + "learning_rate": 1.862e-05, + "loss": 0.0086, + "step": 15690 + }, + { + "epoch": 3.14, + "grad_norm": 0.02065129391849041, + "learning_rate": 1.86e-05, + "loss": 0.0052, + "step": 15700 + }, + { + "epoch": 3.142, + "grad_norm": 0.0024038213305175304, + "learning_rate": 1.858e-05, + "loss": 0.0003, + "step": 15710 + }, + { + "epoch": 3.144, + "grad_norm": 0.0023801634088158607, + "learning_rate": 1.856e-05, + "loss": 0.0519, + "step": 15720 + }, + { + "epoch": 3.146, + "grad_norm": 0.050345417112112045, + "learning_rate": 1.8540000000000002e-05, + "loss": 0.0507, + "step": 15730 + }, + { + "epoch": 3.148, + "grad_norm": 3.0590100288391113, + "learning_rate": 1.8520000000000002e-05, + "loss": 0.0224, + "step": 15740 + }, + { + "epoch": 3.15, + "grad_norm": 0.051652975380420685, + "learning_rate": 1.85e-05, + "loss": 0.0006, + "step": 15750 + }, + { + "epoch": 3.152, + "grad_norm": 0.00765568669885397, + "learning_rate": 1.848e-05, + "loss": 0.0007, + "step": 15760 + }, + { + "epoch": 3.154, + "grad_norm": 0.08669667690992355, + "learning_rate": 1.846e-05, + "loss": 0.0471, + "step": 15770 + }, + { + "epoch": 3.156, + "grad_norm": 0.0020077722147107124, + "learning_rate": 1.8440000000000003e-05, + "loss": 0.0436, + "step": 15780 + }, + { + "epoch": 3.158, + "grad_norm": 0.008714812807738781, + "learning_rate": 1.842e-05, + "loss": 0.0017, + "step": 15790 + }, + { + "epoch": 3.16, + "grad_norm": 0.008934718556702137, + "learning_rate": 1.84e-05, + "loss": 0.0003, + "step": 15800 + }, + { + "epoch": 3.162, + "grad_norm": 0.0029634966049343348, + "learning_rate": 1.838e-05, + "loss": 0.0048, + "step": 15810 + }, + { + "epoch": 3.164, + "grad_norm": 0.26886558532714844, + "learning_rate": 1.8360000000000004e-05, + "loss": 0.0015, + "step": 15820 + }, + { + "epoch": 3.166, + "grad_norm": 4.406497955322266, + "learning_rate": 1.834e-05, + "loss": 
0.1182, + "step": 15830 + }, + { + "epoch": 3.168, + "grad_norm": 0.006616260390728712, + "learning_rate": 1.832e-05, + "loss": 0.0223, + "step": 15840 + }, + { + "epoch": 3.17, + "grad_norm": 0.007440556772053242, + "learning_rate": 1.83e-05, + "loss": 0.0015, + "step": 15850 + }, + { + "epoch": 3.172, + "grad_norm": 0.08389567583799362, + "learning_rate": 1.828e-05, + "loss": 0.0223, + "step": 15860 + }, + { + "epoch": 3.174, + "grad_norm": 0.08651598542928696, + "learning_rate": 1.826e-05, + "loss": 0.0119, + "step": 15870 + }, + { + "epoch": 3.176, + "grad_norm": 0.0037528069224208593, + "learning_rate": 1.824e-05, + "loss": 0.001, + "step": 15880 + }, + { + "epoch": 3.178, + "grad_norm": 0.000578329199925065, + "learning_rate": 1.8220000000000002e-05, + "loss": 0.0381, + "step": 15890 + }, + { + "epoch": 3.18, + "grad_norm": 0.0035956057254225016, + "learning_rate": 1.8200000000000002e-05, + "loss": 0.0006, + "step": 15900 + }, + { + "epoch": 3.182, + "grad_norm": 0.013380602933466434, + "learning_rate": 1.818e-05, + "loss": 0.0178, + "step": 15910 + }, + { + "epoch": 3.184, + "grad_norm": 0.023049576207995415, + "learning_rate": 1.8160000000000002e-05, + "loss": 0.001, + "step": 15920 + }, + { + "epoch": 3.186, + "grad_norm": 0.0015177453169599175, + "learning_rate": 1.8140000000000003e-05, + "loss": 0.0016, + "step": 15930 + }, + { + "epoch": 3.188, + "grad_norm": 0.009679178707301617, + "learning_rate": 1.812e-05, + "loss": 0.0062, + "step": 15940 + }, + { + "epoch": 3.19, + "grad_norm": 0.0049837734550237656, + "learning_rate": 1.81e-05, + "loss": 0.0086, + "step": 15950 + }, + { + "epoch": 3.192, + "grad_norm": 3.9954798221588135, + "learning_rate": 1.808e-05, + "loss": 0.0424, + "step": 15960 + }, + { + "epoch": 3.194, + "grad_norm": 0.029047487303614616, + "learning_rate": 1.8060000000000003e-05, + "loss": 0.0254, + "step": 15970 + }, + { + "epoch": 3.196, + "grad_norm": 0.007384998258203268, + "learning_rate": 1.804e-05, + "loss": 0.0487, + "step": 15980 + }, + { + "epoch": 3.198, + "grad_norm": 2.2781753540039062, + "learning_rate": 1.802e-05, + "loss": 0.0428, + "step": 15990 + }, + { + "epoch": 3.2, + "grad_norm": 2.6462597846984863, + "learning_rate": 1.8e-05, + "loss": 0.0718, + "step": 16000 + }, + { + "epoch": 3.202, + "grad_norm": 0.005736429709941149, + "learning_rate": 1.798e-05, + "loss": 0.0014, + "step": 16010 + }, + { + "epoch": 3.204, + "grad_norm": 0.006327548995614052, + "learning_rate": 1.796e-05, + "loss": 0.0066, + "step": 16020 + }, + { + "epoch": 3.206, + "grad_norm": 0.006256358232349157, + "learning_rate": 1.794e-05, + "loss": 0.0191, + "step": 16030 + }, + { + "epoch": 3.208, + "grad_norm": 0.008648771792650223, + "learning_rate": 1.792e-05, + "loss": 0.0005, + "step": 16040 + }, + { + "epoch": 3.21, + "grad_norm": 1.948608160018921, + "learning_rate": 1.79e-05, + "loss": 0.0298, + "step": 16050 + }, + { + "epoch": 3.212, + "grad_norm": 0.21498942375183105, + "learning_rate": 1.7879999999999998e-05, + "loss": 0.0007, + "step": 16060 + }, + { + "epoch": 3.214, + "grad_norm": 0.004237090237438679, + "learning_rate": 1.7860000000000002e-05, + "loss": 0.0303, + "step": 16070 + }, + { + "epoch": 3.216, + "grad_norm": 0.0034377332776784897, + "learning_rate": 1.7840000000000002e-05, + "loss": 0.0252, + "step": 16080 + }, + { + "epoch": 3.218, + "grad_norm": 0.36846575140953064, + "learning_rate": 1.7820000000000002e-05, + "loss": 0.0419, + "step": 16090 + }, + { + "epoch": 3.22, + "grad_norm": 0.13659149408340454, + "learning_rate": 1.78e-05, + "loss": 
0.0007, + "step": 16100 + }, + { + "epoch": 3.222, + "grad_norm": 0.015541269443929195, + "learning_rate": 1.7780000000000003e-05, + "loss": 0.0005, + "step": 16110 + }, + { + "epoch": 3.224, + "grad_norm": 0.03992128372192383, + "learning_rate": 1.7760000000000003e-05, + "loss": 0.0103, + "step": 16120 + }, + { + "epoch": 3.226, + "grad_norm": 0.005680619738996029, + "learning_rate": 1.774e-05, + "loss": 0.0011, + "step": 16130 + }, + { + "epoch": 3.228, + "grad_norm": 0.009108452126383781, + "learning_rate": 1.772e-05, + "loss": 0.0011, + "step": 16140 + }, + { + "epoch": 3.23, + "grad_norm": 0.0023171931970864534, + "learning_rate": 1.77e-05, + "loss": 0.0325, + "step": 16150 + }, + { + "epoch": 3.232, + "grad_norm": 0.01577291637659073, + "learning_rate": 1.7680000000000004e-05, + "loss": 0.0006, + "step": 16160 + }, + { + "epoch": 3.234, + "grad_norm": 0.03504559025168419, + "learning_rate": 1.766e-05, + "loss": 0.0491, + "step": 16170 + }, + { + "epoch": 3.2359999999999998, + "grad_norm": 0.033147890120744705, + "learning_rate": 1.764e-05, + "loss": 0.0039, + "step": 16180 + }, + { + "epoch": 3.238, + "grad_norm": 0.027644271031022072, + "learning_rate": 1.762e-05, + "loss": 0.0007, + "step": 16190 + }, + { + "epoch": 3.24, + "grad_norm": 0.2593711316585541, + "learning_rate": 1.76e-05, + "loss": 0.0306, + "step": 16200 + }, + { + "epoch": 3.242, + "grad_norm": 0.02173049934208393, + "learning_rate": 1.758e-05, + "loss": 0.0008, + "step": 16210 + }, + { + "epoch": 3.2439999999999998, + "grad_norm": 0.3893928825855255, + "learning_rate": 1.756e-05, + "loss": 0.0015, + "step": 16220 + }, + { + "epoch": 3.246, + "grad_norm": 1.5050745010375977, + "learning_rate": 1.754e-05, + "loss": 0.0016, + "step": 16230 + }, + { + "epoch": 3.248, + "grad_norm": 0.014095143415033817, + "learning_rate": 1.752e-05, + "loss": 0.0291, + "step": 16240 + }, + { + "epoch": 3.25, + "grad_norm": 0.020162342116236687, + "learning_rate": 1.75e-05, + "loss": 0.0011, + "step": 16250 + }, + { + "epoch": 3.252, + "grad_norm": 0.009506700560450554, + "learning_rate": 1.7480000000000002e-05, + "loss": 0.0011, + "step": 16260 + }, + { + "epoch": 3.254, + "grad_norm": 0.02688973769545555, + "learning_rate": 1.7460000000000002e-05, + "loss": 0.0235, + "step": 16270 + }, + { + "epoch": 3.2560000000000002, + "grad_norm": 0.0014943861169740558, + "learning_rate": 1.7440000000000002e-05, + "loss": 0.0301, + "step": 16280 + }, + { + "epoch": 3.258, + "grad_norm": 0.008569997735321522, + "learning_rate": 1.742e-05, + "loss": 0.0105, + "step": 16290 + }, + { + "epoch": 3.26, + "grad_norm": 0.7792108058929443, + "learning_rate": 1.74e-05, + "loss": 0.0012, + "step": 16300 + }, + { + "epoch": 3.262, + "grad_norm": 0.034945737570524216, + "learning_rate": 1.7380000000000003e-05, + "loss": 0.0003, + "step": 16310 + }, + { + "epoch": 3.2640000000000002, + "grad_norm": 0.2685021162033081, + "learning_rate": 1.736e-05, + "loss": 0.0013, + "step": 16320 + }, + { + "epoch": 3.266, + "grad_norm": 0.0574602410197258, + "learning_rate": 1.734e-05, + "loss": 0.0005, + "step": 16330 + }, + { + "epoch": 3.268, + "grad_norm": 0.003966180607676506, + "learning_rate": 1.732e-05, + "loss": 0.0007, + "step": 16340 + }, + { + "epoch": 3.27, + "grad_norm": 4.431776523590088, + "learning_rate": 1.73e-05, + "loss": 0.004, + "step": 16350 + }, + { + "epoch": 3.2720000000000002, + "grad_norm": 0.0023266009520739317, + "learning_rate": 1.728e-05, + "loss": 0.0004, + "step": 16360 + }, + { + "epoch": 3.274, + "grad_norm": 0.006505758501589298, + 
"learning_rate": 1.726e-05, + "loss": 0.0014, + "step": 16370 + }, + { + "epoch": 3.276, + "grad_norm": 0.09441948682069778, + "learning_rate": 1.724e-05, + "loss": 0.0004, + "step": 16380 + }, + { + "epoch": 3.278, + "grad_norm": 0.06182579696178436, + "learning_rate": 1.722e-05, + "loss": 0.0117, + "step": 16390 + }, + { + "epoch": 3.2800000000000002, + "grad_norm": 7.060178756713867, + "learning_rate": 1.7199999999999998e-05, + "loss": 0.0468, + "step": 16400 + }, + { + "epoch": 3.282, + "grad_norm": 0.00426541967317462, + "learning_rate": 1.718e-05, + "loss": 0.0009, + "step": 16410 + }, + { + "epoch": 3.284, + "grad_norm": 0.008011597208678722, + "learning_rate": 1.7160000000000002e-05, + "loss": 0.0005, + "step": 16420 + }, + { + "epoch": 3.286, + "grad_norm": 0.00453008059412241, + "learning_rate": 1.7140000000000002e-05, + "loss": 0.0006, + "step": 16430 + }, + { + "epoch": 3.288, + "grad_norm": 0.005792681127786636, + "learning_rate": 1.712e-05, + "loss": 0.0012, + "step": 16440 + }, + { + "epoch": 3.29, + "grad_norm": 8.664570808410645, + "learning_rate": 1.7100000000000002e-05, + "loss": 0.0734, + "step": 16450 + }, + { + "epoch": 3.292, + "grad_norm": 0.02087589167058468, + "learning_rate": 1.7080000000000002e-05, + "loss": 0.0007, + "step": 16460 + }, + { + "epoch": 3.294, + "grad_norm": 0.01756032183766365, + "learning_rate": 1.706e-05, + "loss": 0.0337, + "step": 16470 + }, + { + "epoch": 3.296, + "grad_norm": 0.0026294682174921036, + "learning_rate": 1.704e-05, + "loss": 0.0353, + "step": 16480 + }, + { + "epoch": 3.298, + "grad_norm": 0.018317170441150665, + "learning_rate": 1.702e-05, + "loss": 0.0004, + "step": 16490 + }, + { + "epoch": 3.3, + "grad_norm": 0.022388644516468048, + "learning_rate": 1.7000000000000003e-05, + "loss": 0.0175, + "step": 16500 + }, + { + "epoch": 3.302, + "grad_norm": 0.003864126745611429, + "learning_rate": 1.698e-05, + "loss": 0.0621, + "step": 16510 + }, + { + "epoch": 3.304, + "grad_norm": 0.0023028342984616756, + "learning_rate": 1.696e-05, + "loss": 0.0025, + "step": 16520 + }, + { + "epoch": 3.306, + "grad_norm": 0.021012943238019943, + "learning_rate": 1.694e-05, + "loss": 0.0332, + "step": 16530 + }, + { + "epoch": 3.308, + "grad_norm": 0.003342842683196068, + "learning_rate": 1.692e-05, + "loss": 0.0206, + "step": 16540 + }, + { + "epoch": 3.31, + "grad_norm": 0.0015880778664723039, + "learning_rate": 1.69e-05, + "loss": 0.0007, + "step": 16550 + }, + { + "epoch": 3.312, + "grad_norm": 0.002224528230726719, + "learning_rate": 1.688e-05, + "loss": 0.0057, + "step": 16560 + }, + { + "epoch": 3.314, + "grad_norm": 0.0382666289806366, + "learning_rate": 1.686e-05, + "loss": 0.0006, + "step": 16570 + }, + { + "epoch": 3.316, + "grad_norm": 3.435798168182373, + "learning_rate": 1.684e-05, + "loss": 0.0117, + "step": 16580 + }, + { + "epoch": 3.318, + "grad_norm": 0.009352126158773899, + "learning_rate": 1.6819999999999998e-05, + "loss": 0.0035, + "step": 16590 + }, + { + "epoch": 3.32, + "grad_norm": 0.00058965221978724, + "learning_rate": 1.6800000000000002e-05, + "loss": 0.0511, + "step": 16600 + }, + { + "epoch": 3.322, + "grad_norm": 0.0026318279560655355, + "learning_rate": 1.6780000000000002e-05, + "loss": 0.0196, + "step": 16610 + }, + { + "epoch": 3.324, + "grad_norm": 0.011510156095027924, + "learning_rate": 1.6760000000000002e-05, + "loss": 0.0004, + "step": 16620 + }, + { + "epoch": 3.326, + "grad_norm": 0.008059993386268616, + "learning_rate": 1.674e-05, + "loss": 0.0006, + "step": 16630 + }, + { + "epoch": 3.328, + "grad_norm": 
0.0023515725042670965, + "learning_rate": 1.672e-05, + "loss": 0.0015, + "step": 16640 + }, + { + "epoch": 3.33, + "grad_norm": 0.006723460275679827, + "learning_rate": 1.6700000000000003e-05, + "loss": 0.0005, + "step": 16650 + }, + { + "epoch": 3.332, + "grad_norm": 0.02267891727387905, + "learning_rate": 1.668e-05, + "loss": 0.0004, + "step": 16660 + }, + { + "epoch": 3.334, + "grad_norm": 0.04994427040219307, + "learning_rate": 1.666e-05, + "loss": 0.0664, + "step": 16670 + }, + { + "epoch": 3.336, + "grad_norm": 0.023461012169718742, + "learning_rate": 1.664e-05, + "loss": 0.0242, + "step": 16680 + }, + { + "epoch": 3.338, + "grad_norm": 0.006013381294906139, + "learning_rate": 1.662e-05, + "loss": 0.0421, + "step": 16690 + }, + { + "epoch": 3.34, + "grad_norm": 1.3734526634216309, + "learning_rate": 1.66e-05, + "loss": 0.0533, + "step": 16700 + }, + { + "epoch": 3.342, + "grad_norm": 0.006099446676671505, + "learning_rate": 1.658e-05, + "loss": 0.0682, + "step": 16710 + }, + { + "epoch": 3.344, + "grad_norm": 0.0032595868688076735, + "learning_rate": 1.656e-05, + "loss": 0.0126, + "step": 16720 + }, + { + "epoch": 3.346, + "grad_norm": 2.1808266639709473, + "learning_rate": 1.654e-05, + "loss": 0.0465, + "step": 16730 + }, + { + "epoch": 3.348, + "grad_norm": 0.019032666459679604, + "learning_rate": 1.652e-05, + "loss": 0.0009, + "step": 16740 + }, + { + "epoch": 3.35, + "grad_norm": 0.01130975503474474, + "learning_rate": 1.65e-05, + "loss": 0.013, + "step": 16750 + }, + { + "epoch": 3.352, + "grad_norm": 0.00591658428311348, + "learning_rate": 1.648e-05, + "loss": 0.0008, + "step": 16760 + }, + { + "epoch": 3.354, + "grad_norm": 0.0044304500333964825, + "learning_rate": 1.646e-05, + "loss": 0.0043, + "step": 16770 + }, + { + "epoch": 3.356, + "grad_norm": 0.15547245740890503, + "learning_rate": 1.644e-05, + "loss": 0.058, + "step": 16780 + }, + { + "epoch": 3.358, + "grad_norm": 0.021581124514341354, + "learning_rate": 1.6420000000000002e-05, + "loss": 0.0017, + "step": 16790 + }, + { + "epoch": 3.36, + "grad_norm": 0.06020371988415718, + "learning_rate": 1.6400000000000002e-05, + "loss": 0.0276, + "step": 16800 + }, + { + "epoch": 3.362, + "grad_norm": 12.31493091583252, + "learning_rate": 1.6380000000000002e-05, + "loss": 0.0159, + "step": 16810 + }, + { + "epoch": 3.364, + "grad_norm": 0.016272541135549545, + "learning_rate": 1.636e-05, + "loss": 0.0587, + "step": 16820 + }, + { + "epoch": 3.366, + "grad_norm": 0.060270678251981735, + "learning_rate": 1.634e-05, + "loss": 0.0007, + "step": 16830 + }, + { + "epoch": 3.368, + "grad_norm": 0.03014090657234192, + "learning_rate": 1.6320000000000003e-05, + "loss": 0.0007, + "step": 16840 + }, + { + "epoch": 3.37, + "grad_norm": 0.012263710610568523, + "learning_rate": 1.63e-05, + "loss": 0.0442, + "step": 16850 + }, + { + "epoch": 3.372, + "grad_norm": 0.1727428138256073, + "learning_rate": 1.628e-05, + "loss": 0.0189, + "step": 16860 + }, + { + "epoch": 3.374, + "grad_norm": 0.016265198588371277, + "learning_rate": 1.626e-05, + "loss": 0.022, + "step": 16870 + }, + { + "epoch": 3.376, + "grad_norm": 0.0468362420797348, + "learning_rate": 1.624e-05, + "loss": 0.0008, + "step": 16880 + }, + { + "epoch": 3.378, + "grad_norm": 0.015331187285482883, + "learning_rate": 1.622e-05, + "loss": 0.0006, + "step": 16890 + }, + { + "epoch": 3.38, + "grad_norm": 0.05118807777762413, + "learning_rate": 1.62e-05, + "loss": 0.0023, + "step": 16900 + }, + { + "epoch": 3.382, + "grad_norm": 0.01582205668091774, + "learning_rate": 1.618e-05, + "loss": 
0.0292, + "step": 16910 + }, + { + "epoch": 3.384, + "grad_norm": 2.682358503341675, + "learning_rate": 1.616e-05, + "loss": 0.0025, + "step": 16920 + }, + { + "epoch": 3.386, + "grad_norm": 0.013448044657707214, + "learning_rate": 1.6139999999999998e-05, + "loss": 0.0019, + "step": 16930 + }, + { + "epoch": 3.388, + "grad_norm": 0.005577258300036192, + "learning_rate": 1.612e-05, + "loss": 0.0007, + "step": 16940 + }, + { + "epoch": 3.39, + "grad_norm": 0.01986427791416645, + "learning_rate": 1.6100000000000002e-05, + "loss": 0.0007, + "step": 16950 + }, + { + "epoch": 3.392, + "grad_norm": 0.1588340550661087, + "learning_rate": 1.6080000000000002e-05, + "loss": 0.0384, + "step": 16960 + }, + { + "epoch": 3.394, + "grad_norm": 0.0015054995892569423, + "learning_rate": 1.606e-05, + "loss": 0.0006, + "step": 16970 + }, + { + "epoch": 3.396, + "grad_norm": 0.012828778475522995, + "learning_rate": 1.604e-05, + "loss": 0.0016, + "step": 16980 + }, + { + "epoch": 3.398, + "grad_norm": 0.019603174179792404, + "learning_rate": 1.6020000000000002e-05, + "loss": 0.0003, + "step": 16990 + }, + { + "epoch": 3.4, + "grad_norm": 0.02867083251476288, + "learning_rate": 1.6000000000000003e-05, + "loss": 0.0007, + "step": 17000 + }, + { + "epoch": 3.402, + "grad_norm": 0.026190217584371567, + "learning_rate": 1.598e-05, + "loss": 0.0013, + "step": 17010 + }, + { + "epoch": 3.404, + "grad_norm": 0.001176636666059494, + "learning_rate": 1.596e-05, + "loss": 0.002, + "step": 17020 + }, + { + "epoch": 3.406, + "grad_norm": 0.0011765063973143697, + "learning_rate": 1.594e-05, + "loss": 0.0004, + "step": 17030 + }, + { + "epoch": 3.408, + "grad_norm": 0.001738901250064373, + "learning_rate": 1.592e-05, + "loss": 0.0004, + "step": 17040 + }, + { + "epoch": 3.41, + "grad_norm": 0.007277410943061113, + "learning_rate": 1.59e-05, + "loss": 0.0003, + "step": 17050 + }, + { + "epoch": 3.412, + "grad_norm": 0.0007897934992797673, + "learning_rate": 1.588e-05, + "loss": 0.0099, + "step": 17060 + }, + { + "epoch": 3.414, + "grad_norm": 0.22555957734584808, + "learning_rate": 1.586e-05, + "loss": 0.1061, + "step": 17070 + }, + { + "epoch": 3.416, + "grad_norm": 0.005760278087109327, + "learning_rate": 1.584e-05, + "loss": 0.0169, + "step": 17080 + }, + { + "epoch": 3.418, + "grad_norm": 0.00394419115036726, + "learning_rate": 1.582e-05, + "loss": 0.0021, + "step": 17090 + }, + { + "epoch": 3.42, + "grad_norm": 0.008633914403617382, + "learning_rate": 1.58e-05, + "loss": 0.0012, + "step": 17100 + }, + { + "epoch": 3.422, + "grad_norm": 0.014838412404060364, + "learning_rate": 1.578e-05, + "loss": 0.027, + "step": 17110 + }, + { + "epoch": 3.424, + "grad_norm": 0.005310640204697847, + "learning_rate": 1.5759999999999998e-05, + "loss": 0.0007, + "step": 17120 + }, + { + "epoch": 3.426, + "grad_norm": 0.003743154229596257, + "learning_rate": 1.5740000000000002e-05, + "loss": 0.0004, + "step": 17130 + }, + { + "epoch": 3.428, + "grad_norm": 6.841172218322754, + "learning_rate": 1.5720000000000002e-05, + "loss": 0.0212, + "step": 17140 + }, + { + "epoch": 3.43, + "grad_norm": 0.0082596680149436, + "learning_rate": 1.5700000000000002e-05, + "loss": 0.0029, + "step": 17150 + }, + { + "epoch": 3.432, + "grad_norm": 0.004711842630058527, + "learning_rate": 1.568e-05, + "loss": 0.0079, + "step": 17160 + }, + { + "epoch": 3.434, + "grad_norm": 0.0029079318046569824, + "learning_rate": 1.566e-05, + "loss": 0.0002, + "step": 17170 + }, + { + "epoch": 3.436, + "grad_norm": 0.0035103964619338512, + "learning_rate": 
1.5640000000000003e-05, + "loss": 0.0006, + "step": 17180 + }, + { + "epoch": 3.438, + "grad_norm": 0.0038129487074911594, + "learning_rate": 1.5620000000000003e-05, + "loss": 0.0006, + "step": 17190 + }, + { + "epoch": 3.44, + "grad_norm": 0.025164583697915077, + "learning_rate": 1.56e-05, + "loss": 0.0003, + "step": 17200 + }, + { + "epoch": 3.442, + "grad_norm": 0.0032029796857386827, + "learning_rate": 1.558e-05, + "loss": 0.002, + "step": 17210 + }, + { + "epoch": 3.444, + "grad_norm": 0.008071298711001873, + "learning_rate": 1.556e-05, + "loss": 0.0092, + "step": 17220 + }, + { + "epoch": 3.446, + "grad_norm": 0.025147704407572746, + "learning_rate": 1.554e-05, + "loss": 0.0006, + "step": 17230 + }, + { + "epoch": 3.448, + "grad_norm": 0.0043593524023890495, + "learning_rate": 1.552e-05, + "loss": 0.0002, + "step": 17240 + }, + { + "epoch": 3.45, + "grad_norm": 0.0024096034467220306, + "learning_rate": 1.55e-05, + "loss": 0.024, + "step": 17250 + }, + { + "epoch": 3.452, + "grad_norm": 2.802278518676758, + "learning_rate": 1.548e-05, + "loss": 0.0362, + "step": 17260 + }, + { + "epoch": 3.454, + "grad_norm": 0.009577326476573944, + "learning_rate": 1.546e-05, + "loss": 0.0123, + "step": 17270 + }, + { + "epoch": 3.456, + "grad_norm": 0.0024973542895168066, + "learning_rate": 1.544e-05, + "loss": 0.0022, + "step": 17280 + }, + { + "epoch": 3.458, + "grad_norm": 0.003990876022726297, + "learning_rate": 1.542e-05, + "loss": 0.0678, + "step": 17290 + }, + { + "epoch": 3.46, + "grad_norm": 0.02418040670454502, + "learning_rate": 1.54e-05, + "loss": 0.0028, + "step": 17300 + }, + { + "epoch": 3.462, + "grad_norm": 0.002280684420838952, + "learning_rate": 1.538e-05, + "loss": 0.0012, + "step": 17310 + }, + { + "epoch": 3.464, + "grad_norm": 0.6858948469161987, + "learning_rate": 1.536e-05, + "loss": 0.0012, + "step": 17320 + }, + { + "epoch": 3.466, + "grad_norm": 0.00416453555226326, + "learning_rate": 1.5340000000000002e-05, + "loss": 0.0099, + "step": 17330 + }, + { + "epoch": 3.468, + "grad_norm": 8.234037399291992, + "learning_rate": 1.5320000000000002e-05, + "loss": 0.0406, + "step": 17340 + }, + { + "epoch": 3.4699999999999998, + "grad_norm": 0.011349360458552837, + "learning_rate": 1.53e-05, + "loss": 0.0003, + "step": 17350 + }, + { + "epoch": 3.472, + "grad_norm": 0.011367256753146648, + "learning_rate": 1.528e-05, + "loss": 0.0439, + "step": 17360 + }, + { + "epoch": 3.474, + "grad_norm": 0.006158503703773022, + "learning_rate": 1.5260000000000003e-05, + "loss": 0.0162, + "step": 17370 + }, + { + "epoch": 3.476, + "grad_norm": 0.003388832788914442, + "learning_rate": 1.5240000000000001e-05, + "loss": 0.0015, + "step": 17380 + }, + { + "epoch": 3.4779999999999998, + "grad_norm": 0.09154608845710754, + "learning_rate": 1.5220000000000002e-05, + "loss": 0.0595, + "step": 17390 + }, + { + "epoch": 3.48, + "grad_norm": 0.0007251560455188155, + "learning_rate": 1.52e-05, + "loss": 0.0015, + "step": 17400 + }, + { + "epoch": 3.482, + "grad_norm": 0.0014719640603289008, + "learning_rate": 1.518e-05, + "loss": 0.0272, + "step": 17410 + }, + { + "epoch": 3.484, + "grad_norm": 0.004138593561947346, + "learning_rate": 1.5160000000000002e-05, + "loss": 0.0039, + "step": 17420 + }, + { + "epoch": 3.4859999999999998, + "grad_norm": 0.002763948170468211, + "learning_rate": 1.514e-05, + "loss": 0.0447, + "step": 17430 + }, + { + "epoch": 3.488, + "grad_norm": 0.002828595694154501, + "learning_rate": 1.5120000000000001e-05, + "loss": 0.0004, + "step": 17440 + }, + { + "epoch": 3.49, + "grad_norm": 
0.007722462061792612, + "learning_rate": 1.51e-05, + "loss": 0.0088, + "step": 17450 + }, + { + "epoch": 3.492, + "grad_norm": 0.024451201781630516, + "learning_rate": 1.508e-05, + "loss": 0.0034, + "step": 17460 + }, + { + "epoch": 3.4939999999999998, + "grad_norm": 0.01881391368806362, + "learning_rate": 1.5060000000000001e-05, + "loss": 0.0377, + "step": 17470 + }, + { + "epoch": 3.496, + "grad_norm": 0.004509866703301668, + "learning_rate": 1.5040000000000002e-05, + "loss": 0.0004, + "step": 17480 + }, + { + "epoch": 3.498, + "grad_norm": 0.09744896739721298, + "learning_rate": 1.502e-05, + "loss": 0.0116, + "step": 17490 + }, + { + "epoch": 3.5, + "grad_norm": 0.9149200320243835, + "learning_rate": 1.5e-05, + "loss": 0.0302, + "step": 17500 + }, + { + "epoch": 3.502, + "grad_norm": 0.014574715867638588, + "learning_rate": 1.4979999999999999e-05, + "loss": 0.0005, + "step": 17510 + }, + { + "epoch": 3.504, + "grad_norm": 0.0013671967899426818, + "learning_rate": 1.4960000000000002e-05, + "loss": 0.0004, + "step": 17520 + }, + { + "epoch": 3.5060000000000002, + "grad_norm": 0.044884104281663895, + "learning_rate": 1.4940000000000001e-05, + "loss": 0.0034, + "step": 17530 + }, + { + "epoch": 3.508, + "grad_norm": 0.002155133057385683, + "learning_rate": 1.4920000000000001e-05, + "loss": 0.0251, + "step": 17540 + }, + { + "epoch": 3.51, + "grad_norm": 0.005239092279225588, + "learning_rate": 1.49e-05, + "loss": 0.0031, + "step": 17550 + }, + { + "epoch": 3.512, + "grad_norm": 0.004131457768380642, + "learning_rate": 1.488e-05, + "loss": 0.0315, + "step": 17560 + }, + { + "epoch": 3.5140000000000002, + "grad_norm": 0.0030689926352351904, + "learning_rate": 1.4860000000000002e-05, + "loss": 0.0001, + "step": 17570 + }, + { + "epoch": 3.516, + "grad_norm": 0.02878253161907196, + "learning_rate": 1.4840000000000002e-05, + "loss": 0.0379, + "step": 17580 + }, + { + "epoch": 3.518, + "grad_norm": 0.011847962625324726, + "learning_rate": 1.482e-05, + "loss": 0.0004, + "step": 17590 + }, + { + "epoch": 3.52, + "grad_norm": 0.00936802290380001, + "learning_rate": 1.48e-05, + "loss": 0.0144, + "step": 17600 + }, + { + "epoch": 3.5220000000000002, + "grad_norm": 0.0026822967920452356, + "learning_rate": 1.4779999999999999e-05, + "loss": 0.0002, + "step": 17610 + }, + { + "epoch": 3.524, + "grad_norm": 0.03320813924074173, + "learning_rate": 1.4760000000000001e-05, + "loss": 0.0166, + "step": 17620 + }, + { + "epoch": 3.526, + "grad_norm": 0.0031809851061552763, + "learning_rate": 1.4740000000000001e-05, + "loss": 0.0004, + "step": 17630 + }, + { + "epoch": 3.528, + "grad_norm": 0.0020093093626201153, + "learning_rate": 1.472e-05, + "loss": 0.0374, + "step": 17640 + }, + { + "epoch": 3.5300000000000002, + "grad_norm": 0.013656358234584332, + "learning_rate": 1.47e-05, + "loss": 0.0003, + "step": 17650 + }, + { + "epoch": 3.532, + "grad_norm": 0.0178862102329731, + "learning_rate": 1.4680000000000002e-05, + "loss": 0.0002, + "step": 17660 + }, + { + "epoch": 3.534, + "grad_norm": 0.010191562585532665, + "learning_rate": 1.4660000000000002e-05, + "loss": 0.0513, + "step": 17670 + }, + { + "epoch": 3.536, + "grad_norm": 0.0026134855579584837, + "learning_rate": 1.464e-05, + "loss": 0.0464, + "step": 17680 + }, + { + "epoch": 3.5380000000000003, + "grad_norm": 0.009755929931998253, + "learning_rate": 1.462e-05, + "loss": 0.0054, + "step": 17690 + }, + { + "epoch": 3.54, + "grad_norm": 0.015786219388246536, + "learning_rate": 1.4599999999999999e-05, + "loss": 0.0214, + "step": 17700 + }, + { + "epoch": 
3.542, + "grad_norm": 0.005827093962579966, + "learning_rate": 1.4580000000000003e-05, + "loss": 0.0005, + "step": 17710 + }, + { + "epoch": 3.544, + "grad_norm": 0.004923023749142885, + "learning_rate": 1.4560000000000001e-05, + "loss": 0.0279, + "step": 17720 + }, + { + "epoch": 3.5460000000000003, + "grad_norm": 0.002227694494649768, + "learning_rate": 1.4540000000000001e-05, + "loss": 0.0039, + "step": 17730 + }, + { + "epoch": 3.548, + "grad_norm": 0.0029734703712165356, + "learning_rate": 1.452e-05, + "loss": 0.0004, + "step": 17740 + }, + { + "epoch": 3.55, + "grad_norm": 4.266994476318359, + "learning_rate": 1.45e-05, + "loss": 0.087, + "step": 17750 + }, + { + "epoch": 3.552, + "grad_norm": 0.007924766279757023, + "learning_rate": 1.4480000000000002e-05, + "loss": 0.0007, + "step": 17760 + }, + { + "epoch": 3.5540000000000003, + "grad_norm": 0.00312436418607831, + "learning_rate": 1.4460000000000002e-05, + "loss": 0.0108, + "step": 17770 + }, + { + "epoch": 3.556, + "grad_norm": 0.004613260738551617, + "learning_rate": 1.444e-05, + "loss": 0.0265, + "step": 17780 + }, + { + "epoch": 3.558, + "grad_norm": 0.014302493073046207, + "learning_rate": 1.4420000000000001e-05, + "loss": 0.0004, + "step": 17790 + }, + { + "epoch": 3.56, + "grad_norm": 0.024875864386558533, + "learning_rate": 1.44e-05, + "loss": 0.0303, + "step": 17800 + }, + { + "epoch": 3.5620000000000003, + "grad_norm": 0.04414631798863411, + "learning_rate": 1.4380000000000001e-05, + "loss": 0.0074, + "step": 17810 + }, + { + "epoch": 3.564, + "grad_norm": 0.03683282062411308, + "learning_rate": 1.4360000000000001e-05, + "loss": 0.0457, + "step": 17820 + }, + { + "epoch": 3.566, + "grad_norm": 0.021348169073462486, + "learning_rate": 1.434e-05, + "loss": 0.0018, + "step": 17830 + }, + { + "epoch": 3.568, + "grad_norm": 0.002948606386780739, + "learning_rate": 1.432e-05, + "loss": 0.0255, + "step": 17840 + }, + { + "epoch": 3.57, + "grad_norm": 0.7394766211509705, + "learning_rate": 1.43e-05, + "loss": 0.001, + "step": 17850 + }, + { + "epoch": 3.572, + "grad_norm": 0.14344385266304016, + "learning_rate": 1.4280000000000002e-05, + "loss": 0.0008, + "step": 17860 + }, + { + "epoch": 3.574, + "grad_norm": 0.007859619334340096, + "learning_rate": 1.426e-05, + "loss": 0.0007, + "step": 17870 + }, + { + "epoch": 3.576, + "grad_norm": 0.003310305532068014, + "learning_rate": 1.4240000000000001e-05, + "loss": 0.0005, + "step": 17880 + }, + { + "epoch": 3.578, + "grad_norm": 0.011208192445337772, + "learning_rate": 1.422e-05, + "loss": 0.0007, + "step": 17890 + }, + { + "epoch": 3.58, + "grad_norm": 0.0034019567538052797, + "learning_rate": 1.42e-05, + "loss": 0.0006, + "step": 17900 + }, + { + "epoch": 3.582, + "grad_norm": 1.4686945676803589, + "learning_rate": 1.4180000000000001e-05, + "loss": 0.002, + "step": 17910 + }, + { + "epoch": 3.584, + "grad_norm": 0.0022530239075422287, + "learning_rate": 1.4160000000000002e-05, + "loss": 0.0004, + "step": 17920 + }, + { + "epoch": 3.586, + "grad_norm": 0.003713824087753892, + "learning_rate": 1.414e-05, + "loss": 0.0262, + "step": 17930 + }, + { + "epoch": 3.588, + "grad_norm": 0.003505872329697013, + "learning_rate": 1.412e-05, + "loss": 0.0012, + "step": 17940 + }, + { + "epoch": 3.59, + "grad_norm": 0.012412111274898052, + "learning_rate": 1.4099999999999999e-05, + "loss": 0.0004, + "step": 17950 + }, + { + "epoch": 3.592, + "grad_norm": 0.0049460954032838345, + "learning_rate": 1.408e-05, + "loss": 0.0231, + "step": 17960 + }, + { + "epoch": 3.594, + "grad_norm": 
0.007156440056860447, + "learning_rate": 1.4060000000000001e-05, + "loss": 0.0004, + "step": 17970 + }, + { + "epoch": 3.596, + "grad_norm": 0.27144309878349304, + "learning_rate": 1.4040000000000001e-05, + "loss": 0.0196, + "step": 17980 + }, + { + "epoch": 3.598, + "grad_norm": 0.025920584797859192, + "learning_rate": 1.402e-05, + "loss": 0.0036, + "step": 17990 + }, + { + "epoch": 3.6, + "grad_norm": 0.005095527973026037, + "learning_rate": 1.4000000000000001e-05, + "loss": 0.012, + "step": 18000 + }, + { + "epoch": 3.602, + "grad_norm": 0.0032979496754705906, + "learning_rate": 1.3980000000000002e-05, + "loss": 0.0073, + "step": 18010 + }, + { + "epoch": 3.604, + "grad_norm": 0.0015533601399511099, + "learning_rate": 1.396e-05, + "loss": 0.0817, + "step": 18020 + }, + { + "epoch": 3.606, + "grad_norm": 0.005558553151786327, + "learning_rate": 1.394e-05, + "loss": 0.0022, + "step": 18030 + }, + { + "epoch": 3.608, + "grad_norm": 0.01786387525498867, + "learning_rate": 1.3919999999999999e-05, + "loss": 0.0003, + "step": 18040 + }, + { + "epoch": 3.61, + "grad_norm": 0.006026718765497208, + "learning_rate": 1.3900000000000002e-05, + "loss": 0.0007, + "step": 18050 + }, + { + "epoch": 3.612, + "grad_norm": 0.008753432892262936, + "learning_rate": 1.3880000000000001e-05, + "loss": 0.0007, + "step": 18060 + }, + { + "epoch": 3.614, + "grad_norm": 0.012564820237457752, + "learning_rate": 1.3860000000000001e-05, + "loss": 0.0007, + "step": 18070 + }, + { + "epoch": 3.616, + "grad_norm": 0.03756599500775337, + "learning_rate": 1.384e-05, + "loss": 0.0011, + "step": 18080 + }, + { + "epoch": 3.618, + "grad_norm": 0.002842110814526677, + "learning_rate": 1.382e-05, + "loss": 0.0256, + "step": 18090 + }, + { + "epoch": 3.62, + "grad_norm": 3.56378173828125, + "learning_rate": 1.3800000000000002e-05, + "loss": 0.0338, + "step": 18100 + }, + { + "epoch": 3.622, + "grad_norm": 0.005345380399376154, + "learning_rate": 1.3780000000000002e-05, + "loss": 0.0002, + "step": 18110 + }, + { + "epoch": 3.624, + "grad_norm": 0.004506293218582869, + "learning_rate": 1.376e-05, + "loss": 0.0003, + "step": 18120 + }, + { + "epoch": 3.626, + "grad_norm": 3.007185697555542, + "learning_rate": 1.374e-05, + "loss": 0.0373, + "step": 18130 + }, + { + "epoch": 3.628, + "grad_norm": 0.027455611154437065, + "learning_rate": 1.3719999999999999e-05, + "loss": 0.0003, + "step": 18140 + }, + { + "epoch": 3.63, + "grad_norm": 0.004361871164292097, + "learning_rate": 1.3700000000000001e-05, + "loss": 0.0761, + "step": 18150 + }, + { + "epoch": 3.632, + "grad_norm": 0.49487006664276123, + "learning_rate": 1.3680000000000001e-05, + "loss": 0.0009, + "step": 18160 + }, + { + "epoch": 3.634, + "grad_norm": 0.0041132434271276, + "learning_rate": 1.3660000000000001e-05, + "loss": 0.0012, + "step": 18170 + }, + { + "epoch": 3.636, + "grad_norm": 0.007894878275692463, + "learning_rate": 1.364e-05, + "loss": 0.0035, + "step": 18180 + }, + { + "epoch": 3.638, + "grad_norm": 0.003962747287005186, + "learning_rate": 1.362e-05, + "loss": 0.0014, + "step": 18190 + }, + { + "epoch": 3.64, + "grad_norm": 0.007062731776386499, + "learning_rate": 1.3600000000000002e-05, + "loss": 0.0007, + "step": 18200 + }, + { + "epoch": 3.642, + "grad_norm": 9.764742851257324, + "learning_rate": 1.358e-05, + "loss": 0.0567, + "step": 18210 + }, + { + "epoch": 3.644, + "grad_norm": 0.01080237701535225, + "learning_rate": 1.356e-05, + "loss": 0.0033, + "step": 18220 + }, + { + "epoch": 3.646, + "grad_norm": 0.0007170343305915594, + "learning_rate": 
1.3539999999999999e-05, + "loss": 0.0006, + "step": 18230 + }, + { + "epoch": 3.648, + "grad_norm": 6.265512466430664, + "learning_rate": 1.352e-05, + "loss": 0.0549, + "step": 18240 + }, + { + "epoch": 3.65, + "grad_norm": 0.11348658055067062, + "learning_rate": 1.3500000000000001e-05, + "loss": 0.0355, + "step": 18250 + }, + { + "epoch": 3.652, + "grad_norm": 3.7011401653289795, + "learning_rate": 1.3480000000000001e-05, + "loss": 0.052, + "step": 18260 + }, + { + "epoch": 3.654, + "grad_norm": 0.004853812512010336, + "learning_rate": 1.346e-05, + "loss": 0.0002, + "step": 18270 + }, + { + "epoch": 3.656, + "grad_norm": 0.0019084862433373928, + "learning_rate": 1.344e-05, + "loss": 0.0063, + "step": 18280 + }, + { + "epoch": 3.658, + "grad_norm": 0.002195891458541155, + "learning_rate": 1.3420000000000002e-05, + "loss": 0.0062, + "step": 18290 + }, + { + "epoch": 3.66, + "grad_norm": 0.028412865474820137, + "learning_rate": 1.3400000000000002e-05, + "loss": 0.0125, + "step": 18300 + }, + { + "epoch": 3.662, + "grad_norm": 0.02995235100388527, + "learning_rate": 1.338e-05, + "loss": 0.0308, + "step": 18310 + }, + { + "epoch": 3.664, + "grad_norm": 0.05932564288377762, + "learning_rate": 1.336e-05, + "loss": 0.0178, + "step": 18320 + }, + { + "epoch": 3.666, + "grad_norm": 4.091817855834961, + "learning_rate": 1.334e-05, + "loss": 0.0817, + "step": 18330 + }, + { + "epoch": 3.668, + "grad_norm": 0.007290849462151527, + "learning_rate": 1.3320000000000001e-05, + "loss": 0.0351, + "step": 18340 + }, + { + "epoch": 3.67, + "grad_norm": 0.0027278633788228035, + "learning_rate": 1.3300000000000001e-05, + "loss": 0.0011, + "step": 18350 + }, + { + "epoch": 3.672, + "grad_norm": 0.010340756736695766, + "learning_rate": 1.3280000000000002e-05, + "loss": 0.0014, + "step": 18360 + }, + { + "epoch": 3.674, + "grad_norm": 0.001625576289370656, + "learning_rate": 1.326e-05, + "loss": 0.0005, + "step": 18370 + }, + { + "epoch": 3.676, + "grad_norm": 0.013287074863910675, + "learning_rate": 1.324e-05, + "loss": 0.0008, + "step": 18380 + }, + { + "epoch": 3.678, + "grad_norm": 0.05007312819361687, + "learning_rate": 1.3220000000000002e-05, + "loss": 0.0008, + "step": 18390 + }, + { + "epoch": 3.68, + "grad_norm": 0.050255417823791504, + "learning_rate": 1.32e-05, + "loss": 0.0091, + "step": 18400 + }, + { + "epoch": 3.682, + "grad_norm": 0.004707262851297855, + "learning_rate": 1.3180000000000001e-05, + "loss": 0.0009, + "step": 18410 + }, + { + "epoch": 3.684, + "grad_norm": 0.0015249394346028566, + "learning_rate": 1.316e-05, + "loss": 0.0296, + "step": 18420 + }, + { + "epoch": 3.686, + "grad_norm": 0.06252807378768921, + "learning_rate": 1.314e-05, + "loss": 0.0059, + "step": 18430 + }, + { + "epoch": 3.6879999999999997, + "grad_norm": 0.02390551008284092, + "learning_rate": 1.3120000000000001e-05, + "loss": 0.0157, + "step": 18440 + }, + { + "epoch": 3.69, + "grad_norm": 0.009191851131618023, + "learning_rate": 1.3100000000000002e-05, + "loss": 0.033, + "step": 18450 + }, + { + "epoch": 3.692, + "grad_norm": 0.020053191110491753, + "learning_rate": 1.308e-05, + "loss": 0.002, + "step": 18460 + }, + { + "epoch": 3.694, + "grad_norm": 0.0027913597878068686, + "learning_rate": 1.306e-05, + "loss": 0.0034, + "step": 18470 + }, + { + "epoch": 3.6959999999999997, + "grad_norm": 5.262605667114258, + "learning_rate": 1.3039999999999999e-05, + "loss": 0.0049, + "step": 18480 + }, + { + "epoch": 3.698, + "grad_norm": 0.0016802336322143674, + "learning_rate": 1.3020000000000002e-05, + "loss": 0.0023, + "step": 
18490 + }, + { + "epoch": 3.7, + "grad_norm": 0.002672248985618353, + "learning_rate": 1.3000000000000001e-05, + "loss": 0.0013, + "step": 18500 + }, + { + "epoch": 3.702, + "grad_norm": 0.0664878785610199, + "learning_rate": 1.2980000000000001e-05, + "loss": 0.0016, + "step": 18510 + }, + { + "epoch": 3.7039999999999997, + "grad_norm": 3.239924192428589, + "learning_rate": 1.296e-05, + "loss": 0.0425, + "step": 18520 + }, + { + "epoch": 3.706, + "grad_norm": 0.0018718920182436705, + "learning_rate": 1.294e-05, + "loss": 0.0198, + "step": 18530 + }, + { + "epoch": 3.708, + "grad_norm": 0.01981085166335106, + "learning_rate": 1.2920000000000002e-05, + "loss": 0.0013, + "step": 18540 + }, + { + "epoch": 3.71, + "grad_norm": 0.0019104615785181522, + "learning_rate": 1.29e-05, + "loss": 0.0006, + "step": 18550 + }, + { + "epoch": 3.7119999999999997, + "grad_norm": 0.05587019398808479, + "learning_rate": 1.288e-05, + "loss": 0.1196, + "step": 18560 + }, + { + "epoch": 3.714, + "grad_norm": 0.14568588137626648, + "learning_rate": 1.286e-05, + "loss": 0.0008, + "step": 18570 + }, + { + "epoch": 3.716, + "grad_norm": 0.008522302843630314, + "learning_rate": 1.2839999999999999e-05, + "loss": 0.0346, + "step": 18580 + }, + { + "epoch": 3.718, + "grad_norm": 0.0016074853483587503, + "learning_rate": 1.2820000000000001e-05, + "loss": 0.01, + "step": 18590 + }, + { + "epoch": 3.7199999999999998, + "grad_norm": 0.021763231605291367, + "learning_rate": 1.2800000000000001e-05, + "loss": 0.0312, + "step": 18600 + }, + { + "epoch": 3.722, + "grad_norm": 0.1613340526819229, + "learning_rate": 1.278e-05, + "loss": 0.0035, + "step": 18610 + }, + { + "epoch": 3.724, + "grad_norm": 0.03853601589798927, + "learning_rate": 1.276e-05, + "loss": 0.0007, + "step": 18620 + }, + { + "epoch": 3.726, + "grad_norm": 0.012178819626569748, + "learning_rate": 1.2740000000000002e-05, + "loss": 0.0027, + "step": 18630 + }, + { + "epoch": 3.7279999999999998, + "grad_norm": 0.009596653282642365, + "learning_rate": 1.2720000000000002e-05, + "loss": 0.0153, + "step": 18640 + }, + { + "epoch": 3.73, + "grad_norm": 0.030010340735316277, + "learning_rate": 1.27e-05, + "loss": 0.0039, + "step": 18650 + }, + { + "epoch": 3.732, + "grad_norm": 0.005089831538498402, + "learning_rate": 1.268e-05, + "loss": 0.0002, + "step": 18660 + }, + { + "epoch": 3.734, + "grad_norm": 0.018710069358348846, + "learning_rate": 1.2659999999999999e-05, + "loss": 0.0272, + "step": 18670 + }, + { + "epoch": 3.7359999999999998, + "grad_norm": 0.0020607642363756895, + "learning_rate": 1.2640000000000003e-05, + "loss": 0.0003, + "step": 18680 + }, + { + "epoch": 3.738, + "grad_norm": 0.0018226850079372525, + "learning_rate": 1.2620000000000001e-05, + "loss": 0.0004, + "step": 18690 + }, + { + "epoch": 3.74, + "grad_norm": 0.0032087010331451893, + "learning_rate": 1.2600000000000001e-05, + "loss": 0.0067, + "step": 18700 + }, + { + "epoch": 3.742, + "grad_norm": 0.002074528718367219, + "learning_rate": 1.258e-05, + "loss": 0.0002, + "step": 18710 + }, + { + "epoch": 3.7439999999999998, + "grad_norm": 0.009091685526072979, + "learning_rate": 1.256e-05, + "loss": 0.0001, + "step": 18720 + }, + { + "epoch": 3.746, + "grad_norm": 0.0004810831160284579, + "learning_rate": 1.2540000000000002e-05, + "loss": 0.0007, + "step": 18730 + }, + { + "epoch": 3.748, + "grad_norm": 0.010521749965846539, + "learning_rate": 1.252e-05, + "loss": 0.0009, + "step": 18740 + }, + { + "epoch": 3.75, + "grad_norm": 5.149264335632324, + "learning_rate": 1.25e-05, + "loss": 0.1288, + 
"step": 18750 + }, + { + "epoch": 3.752, + "grad_norm": 0.005692175589501858, + "learning_rate": 1.248e-05, + "loss": 0.0241, + "step": 18760 + }, + { + "epoch": 3.754, + "grad_norm": 0.04272817447781563, + "learning_rate": 1.2460000000000001e-05, + "loss": 0.0011, + "step": 18770 + }, + { + "epoch": 3.7560000000000002, + "grad_norm": 4.0740203857421875, + "learning_rate": 1.244e-05, + "loss": 0.0046, + "step": 18780 + }, + { + "epoch": 3.758, + "grad_norm": 0.020532237365841866, + "learning_rate": 1.2420000000000001e-05, + "loss": 0.0007, + "step": 18790 + }, + { + "epoch": 3.76, + "grad_norm": 0.003703763009980321, + "learning_rate": 1.24e-05, + "loss": 0.0294, + "step": 18800 + }, + { + "epoch": 3.762, + "grad_norm": 0.008622046560049057, + "learning_rate": 1.238e-05, + "loss": 0.0198, + "step": 18810 + }, + { + "epoch": 3.7640000000000002, + "grad_norm": 0.010497739538550377, + "learning_rate": 1.236e-05, + "loss": 0.0173, + "step": 18820 + }, + { + "epoch": 3.766, + "grad_norm": 0.0006842405418865383, + "learning_rate": 1.234e-05, + "loss": 0.0003, + "step": 18830 + }, + { + "epoch": 3.768, + "grad_norm": 0.0067459153942763805, + "learning_rate": 1.232e-05, + "loss": 0.001, + "step": 18840 + }, + { + "epoch": 3.77, + "grad_norm": 0.00805757287889719, + "learning_rate": 1.23e-05, + "loss": 0.0009, + "step": 18850 + }, + { + "epoch": 3.7720000000000002, + "grad_norm": 0.0038732087705284357, + "learning_rate": 1.2280000000000001e-05, + "loss": 0.0073, + "step": 18860 + }, + { + "epoch": 3.774, + "grad_norm": 0.005831129848957062, + "learning_rate": 1.2260000000000001e-05, + "loss": 0.0002, + "step": 18870 + }, + { + "epoch": 3.776, + "grad_norm": 0.004823405761271715, + "learning_rate": 1.224e-05, + "loss": 0.0017, + "step": 18880 + }, + { + "epoch": 3.778, + "grad_norm": 0.0019176382338628173, + "learning_rate": 1.2220000000000002e-05, + "loss": 0.0088, + "step": 18890 + }, + { + "epoch": 3.7800000000000002, + "grad_norm": 0.018935251981019974, + "learning_rate": 1.22e-05, + "loss": 0.0021, + "step": 18900 + }, + { + "epoch": 3.782, + "grad_norm": 0.010264934971928596, + "learning_rate": 1.2180000000000002e-05, + "loss": 0.067, + "step": 18910 + }, + { + "epoch": 3.784, + "grad_norm": 0.024122945964336395, + "learning_rate": 1.216e-05, + "loss": 0.0151, + "step": 18920 + }, + { + "epoch": 3.786, + "grad_norm": 0.46055740118026733, + "learning_rate": 1.214e-05, + "loss": 0.0005, + "step": 18930 + }, + { + "epoch": 3.7880000000000003, + "grad_norm": 0.004659720230847597, + "learning_rate": 1.2120000000000001e-05, + "loss": 0.0003, + "step": 18940 + }, + { + "epoch": 3.79, + "grad_norm": 0.11232586205005646, + "learning_rate": 1.2100000000000001e-05, + "loss": 0.0011, + "step": 18950 + }, + { + "epoch": 3.792, + "grad_norm": 0.0011392510496079922, + "learning_rate": 1.2080000000000001e-05, + "loss": 0.0003, + "step": 18960 + }, + { + "epoch": 3.794, + "grad_norm": 0.0054550268687307835, + "learning_rate": 1.206e-05, + "loss": 0.0203, + "step": 18970 + }, + { + "epoch": 3.7960000000000003, + "grad_norm": 0.035335637629032135, + "learning_rate": 1.204e-05, + "loss": 0.0005, + "step": 18980 + }, + { + "epoch": 3.798, + "grad_norm": 0.0006725691491737962, + "learning_rate": 1.202e-05, + "loss": 0.014, + "step": 18990 + }, + { + "epoch": 3.8, + "grad_norm": 0.006045354995876551, + "learning_rate": 1.2e-05, + "loss": 0.0102, + "step": 19000 + }, + { + "epoch": 3.802, + "grad_norm": 0.0034727011807262897, + "learning_rate": 1.198e-05, + "loss": 0.0035, + "step": 19010 + }, + { + "epoch": 
3.8040000000000003, + "grad_norm": 0.006682442035526037, + "learning_rate": 1.196e-05, + "loss": 0.0004, + "step": 19020 + }, + { + "epoch": 3.806, + "grad_norm": 3.7649495601654053, + "learning_rate": 1.1940000000000001e-05, + "loss": 0.0034, + "step": 19030 + }, + { + "epoch": 3.808, + "grad_norm": 0.0046861134469509125, + "learning_rate": 1.1920000000000001e-05, + "loss": 0.0209, + "step": 19040 + }, + { + "epoch": 3.81, + "grad_norm": 0.0016382812755182385, + "learning_rate": 1.19e-05, + "loss": 0.0277, + "step": 19050 + }, + { + "epoch": 3.8120000000000003, + "grad_norm": 0.104917511343956, + "learning_rate": 1.1880000000000001e-05, + "loss": 0.0132, + "step": 19060 + }, + { + "epoch": 3.814, + "grad_norm": 6.915656089782715, + "learning_rate": 1.186e-05, + "loss": 0.0171, + "step": 19070 + }, + { + "epoch": 3.816, + "grad_norm": 0.00960025005042553, + "learning_rate": 1.1840000000000002e-05, + "loss": 0.0004, + "step": 19080 + }, + { + "epoch": 3.818, + "grad_norm": 0.004476846661418676, + "learning_rate": 1.182e-05, + "loss": 0.0487, + "step": 19090 + }, + { + "epoch": 3.82, + "grad_norm": 0.006105990614742041, + "learning_rate": 1.18e-05, + "loss": 0.0003, + "step": 19100 + }, + { + "epoch": 3.822, + "grad_norm": 1.390354871749878, + "learning_rate": 1.178e-05, + "loss": 0.0155, + "step": 19110 + }, + { + "epoch": 3.824, + "grad_norm": 5.109686851501465, + "learning_rate": 1.1760000000000001e-05, + "loss": 0.0233, + "step": 19120 + }, + { + "epoch": 3.826, + "grad_norm": 0.16702832281589508, + "learning_rate": 1.1740000000000001e-05, + "loss": 0.0324, + "step": 19130 + }, + { + "epoch": 3.828, + "grad_norm": 0.02535170502960682, + "learning_rate": 1.172e-05, + "loss": 0.0244, + "step": 19140 + }, + { + "epoch": 3.83, + "grad_norm": 0.0075645833276212215, + "learning_rate": 1.1700000000000001e-05, + "loss": 0.0002, + "step": 19150 + }, + { + "epoch": 3.832, + "grad_norm": 0.029271816834807396, + "learning_rate": 1.168e-05, + "loss": 0.0003, + "step": 19160 + }, + { + "epoch": 3.834, + "grad_norm": 0.04714861512184143, + "learning_rate": 1.166e-05, + "loss": 0.0015, + "step": 19170 + }, + { + "epoch": 3.836, + "grad_norm": 0.005103900097310543, + "learning_rate": 1.164e-05, + "loss": 0.0008, + "step": 19180 + }, + { + "epoch": 3.838, + "grad_norm": 0.004146083723753691, + "learning_rate": 1.162e-05, + "loss": 0.0007, + "step": 19190 + }, + { + "epoch": 3.84, + "grad_norm": 0.06597728282213211, + "learning_rate": 1.16e-05, + "loss": 0.0005, + "step": 19200 + }, + { + "epoch": 3.842, + "grad_norm": 0.038979671895504, + "learning_rate": 1.1580000000000001e-05, + "loss": 0.0385, + "step": 19210 + }, + { + "epoch": 3.844, + "grad_norm": 0.222853884100914, + "learning_rate": 1.156e-05, + "loss": 0.0238, + "step": 19220 + }, + { + "epoch": 3.846, + "grad_norm": 0.0034583851229399443, + "learning_rate": 1.1540000000000001e-05, + "loss": 0.0003, + "step": 19230 + }, + { + "epoch": 3.848, + "grad_norm": 0.0020076294895261526, + "learning_rate": 1.152e-05, + "loss": 0.0002, + "step": 19240 + }, + { + "epoch": 3.85, + "grad_norm": 0.0918976441025734, + "learning_rate": 1.1500000000000002e-05, + "loss": 0.0006, + "step": 19250 + }, + { + "epoch": 3.852, + "grad_norm": 0.0026816155295819044, + "learning_rate": 1.148e-05, + "loss": 0.0003, + "step": 19260 + }, + { + "epoch": 3.854, + "grad_norm": 0.003534205723553896, + "learning_rate": 1.146e-05, + "loss": 0.0532, + "step": 19270 + }, + { + "epoch": 3.856, + "grad_norm": 0.0011774309678003192, + "learning_rate": 1.144e-05, + "loss": 0.0114, + 
"step": 19280 + }, + { + "epoch": 3.858, + "grad_norm": 10.538480758666992, + "learning_rate": 1.142e-05, + "loss": 0.0395, + "step": 19290 + }, + { + "epoch": 3.86, + "grad_norm": 2.4867305755615234, + "learning_rate": 1.1400000000000001e-05, + "loss": 0.0599, + "step": 19300 + }, + { + "epoch": 3.862, + "grad_norm": 0.009613540023565292, + "learning_rate": 1.1380000000000001e-05, + "loss": 0.0571, + "step": 19310 + }, + { + "epoch": 3.864, + "grad_norm": 0.08618064969778061, + "learning_rate": 1.1360000000000001e-05, + "loss": 0.003, + "step": 19320 + }, + { + "epoch": 3.866, + "grad_norm": 0.013218753039836884, + "learning_rate": 1.134e-05, + "loss": 0.0019, + "step": 19330 + }, + { + "epoch": 3.868, + "grad_norm": 0.019840750843286514, + "learning_rate": 1.132e-05, + "loss": 0.0182, + "step": 19340 + }, + { + "epoch": 3.87, + "grad_norm": 0.029982879757881165, + "learning_rate": 1.13e-05, + "loss": 0.0007, + "step": 19350 + }, + { + "epoch": 3.872, + "grad_norm": 0.004936496261507273, + "learning_rate": 1.128e-05, + "loss": 0.0003, + "step": 19360 + }, + { + "epoch": 3.874, + "grad_norm": 0.006760491523891687, + "learning_rate": 1.126e-05, + "loss": 0.0227, + "step": 19370 + }, + { + "epoch": 3.876, + "grad_norm": 4.958995342254639, + "learning_rate": 1.124e-05, + "loss": 0.0459, + "step": 19380 + }, + { + "epoch": 3.878, + "grad_norm": 0.013879384845495224, + "learning_rate": 1.122e-05, + "loss": 0.0105, + "step": 19390 + }, + { + "epoch": 3.88, + "grad_norm": 0.0024123447947204113, + "learning_rate": 1.1200000000000001e-05, + "loss": 0.0348, + "step": 19400 + }, + { + "epoch": 3.882, + "grad_norm": 0.006186407525092363, + "learning_rate": 1.118e-05, + "loss": 0.0026, + "step": 19410 + }, + { + "epoch": 3.884, + "grad_norm": 0.005400556139647961, + "learning_rate": 1.1160000000000002e-05, + "loss": 0.0003, + "step": 19420 + }, + { + "epoch": 3.886, + "grad_norm": 0.010396105237305164, + "learning_rate": 1.114e-05, + "loss": 0.0006, + "step": 19430 + }, + { + "epoch": 3.888, + "grad_norm": 0.05669211596250534, + "learning_rate": 1.112e-05, + "loss": 0.0094, + "step": 19440 + }, + { + "epoch": 3.89, + "grad_norm": 0.011665274389088154, + "learning_rate": 1.11e-05, + "loss": 0.0003, + "step": 19450 + }, + { + "epoch": 3.892, + "grad_norm": 0.0033667744137346745, + "learning_rate": 1.108e-05, + "loss": 0.0003, + "step": 19460 + }, + { + "epoch": 3.894, + "grad_norm": 0.0014184300089254975, + "learning_rate": 1.106e-05, + "loss": 0.0487, + "step": 19470 + }, + { + "epoch": 3.896, + "grad_norm": 0.010493861511349678, + "learning_rate": 1.1040000000000001e-05, + "loss": 0.0004, + "step": 19480 + }, + { + "epoch": 3.898, + "grad_norm": 5.450310230255127, + "learning_rate": 1.1020000000000001e-05, + "loss": 0.0328, + "step": 19490 + }, + { + "epoch": 3.9, + "grad_norm": 0.008521239273250103, + "learning_rate": 1.1000000000000001e-05, + "loss": 0.0007, + "step": 19500 + }, + { + "epoch": 3.902, + "grad_norm": 0.002282432746142149, + "learning_rate": 1.098e-05, + "loss": 0.003, + "step": 19510 + }, + { + "epoch": 3.904, + "grad_norm": 0.008011306636035442, + "learning_rate": 1.096e-05, + "loss": 0.0017, + "step": 19520 + }, + { + "epoch": 3.906, + "grad_norm": 0.006831697188317776, + "learning_rate": 1.094e-05, + "loss": 0.0005, + "step": 19530 + }, + { + "epoch": 3.908, + "grad_norm": 0.03718950226902962, + "learning_rate": 1.092e-05, + "loss": 0.0007, + "step": 19540 + }, + { + "epoch": 3.91, + "grad_norm": 0.0013135026674717665, + "learning_rate": 1.09e-05, + "loss": 0.0308, + "step": 19550 + 
}, + { + "epoch": 3.912, + "grad_norm": 0.005439362488687038, + "learning_rate": 1.088e-05, + "loss": 0.0373, + "step": 19560 + }, + { + "epoch": 3.914, + "grad_norm": 0.0057830181904137135, + "learning_rate": 1.0860000000000001e-05, + "loss": 0.0488, + "step": 19570 + }, + { + "epoch": 3.916, + "grad_norm": 0.0024540277663618326, + "learning_rate": 1.084e-05, + "loss": 0.0004, + "step": 19580 + }, + { + "epoch": 3.918, + "grad_norm": 0.03300853818655014, + "learning_rate": 1.0820000000000001e-05, + "loss": 0.0005, + "step": 19590 + }, + { + "epoch": 3.92, + "grad_norm": 0.016842743381857872, + "learning_rate": 1.08e-05, + "loss": 0.0007, + "step": 19600 + }, + { + "epoch": 3.922, + "grad_norm": 0.03553108125925064, + "learning_rate": 1.0780000000000002e-05, + "loss": 0.0527, + "step": 19610 + }, + { + "epoch": 3.924, + "grad_norm": 0.007941951975226402, + "learning_rate": 1.076e-05, + "loss": 0.0003, + "step": 19620 + }, + { + "epoch": 3.926, + "grad_norm": 0.0028262296691536903, + "learning_rate": 1.074e-05, + "loss": 0.0214, + "step": 19630 + }, + { + "epoch": 3.928, + "grad_norm": 0.2088228166103363, + "learning_rate": 1.072e-05, + "loss": 0.0007, + "step": 19640 + }, + { + "epoch": 3.93, + "grad_norm": 0.012677551247179508, + "learning_rate": 1.0700000000000001e-05, + "loss": 0.0414, + "step": 19650 + }, + { + "epoch": 3.932, + "grad_norm": 0.1734095960855484, + "learning_rate": 1.0680000000000001e-05, + "loss": 0.0065, + "step": 19660 + }, + { + "epoch": 3.934, + "grad_norm": 0.0029586998280137777, + "learning_rate": 1.0660000000000001e-05, + "loss": 0.0002, + "step": 19670 + }, + { + "epoch": 3.936, + "grad_norm": 0.018373634666204453, + "learning_rate": 1.064e-05, + "loss": 0.0011, + "step": 19680 + }, + { + "epoch": 3.9379999999999997, + "grad_norm": 0.010094398632645607, + "learning_rate": 1.062e-05, + "loss": 0.0497, + "step": 19690 + }, + { + "epoch": 3.94, + "grad_norm": 0.06180315464735031, + "learning_rate": 1.06e-05, + "loss": 0.0927, + "step": 19700 + }, + { + "epoch": 3.942, + "grad_norm": 0.0060223545879125595, + "learning_rate": 1.058e-05, + "loss": 0.001, + "step": 19710 + }, + { + "epoch": 3.944, + "grad_norm": 0.006959962192922831, + "learning_rate": 1.056e-05, + "loss": 0.0007, + "step": 19720 + }, + { + "epoch": 3.9459999999999997, + "grad_norm": 0.17345768213272095, + "learning_rate": 1.0539999999999999e-05, + "loss": 0.0333, + "step": 19730 + }, + { + "epoch": 3.948, + "grad_norm": 0.003703049151226878, + "learning_rate": 1.0520000000000001e-05, + "loss": 0.0433, + "step": 19740 + }, + { + "epoch": 3.95, + "grad_norm": 0.0042807250283658504, + "learning_rate": 1.05e-05, + "loss": 0.0009, + "step": 19750 + }, + { + "epoch": 3.952, + "grad_norm": 0.012641056440770626, + "learning_rate": 1.0480000000000001e-05, + "loss": 0.0014, + "step": 19760 + }, + { + "epoch": 3.9539999999999997, + "grad_norm": 0.012951981276273727, + "learning_rate": 1.046e-05, + "loss": 0.0241, + "step": 19770 + }, + { + "epoch": 3.956, + "grad_norm": 0.068780317902565, + "learning_rate": 1.0440000000000002e-05, + "loss": 0.0309, + "step": 19780 + }, + { + "epoch": 3.958, + "grad_norm": 0.005025830119848251, + "learning_rate": 1.042e-05, + "loss": 0.0008, + "step": 19790 + }, + { + "epoch": 3.96, + "grad_norm": 0.39109471440315247, + "learning_rate": 1.04e-05, + "loss": 0.0009, + "step": 19800 + }, + { + "epoch": 3.9619999999999997, + "grad_norm": 0.0031092409044504166, + "learning_rate": 1.038e-05, + "loss": 0.0017, + "step": 19810 + }, + { + "epoch": 3.964, + "grad_norm": 
0.013690159656107426, + "learning_rate": 1.036e-05, + "loss": 0.0066, + "step": 19820 + }, + { + "epoch": 3.966, + "grad_norm": 0.050386037677526474, + "learning_rate": 1.0340000000000001e-05, + "loss": 0.0004, + "step": 19830 + }, + { + "epoch": 3.968, + "grad_norm": 0.009826398454606533, + "learning_rate": 1.0320000000000001e-05, + "loss": 0.0006, + "step": 19840 + }, + { + "epoch": 3.9699999999999998, + "grad_norm": 0.005541136488318443, + "learning_rate": 1.03e-05, + "loss": 0.001, + "step": 19850 + }, + { + "epoch": 3.972, + "grad_norm": 0.03238997980952263, + "learning_rate": 1.0280000000000002e-05, + "loss": 0.0013, + "step": 19860 + }, + { + "epoch": 3.974, + "grad_norm": 0.01623641885817051, + "learning_rate": 1.026e-05, + "loss": 0.03, + "step": 19870 + }, + { + "epoch": 3.976, + "grad_norm": 2.500455856323242, + "learning_rate": 1.024e-05, + "loss": 0.0758, + "step": 19880 + }, + { + "epoch": 3.9779999999999998, + "grad_norm": 0.0059792581014335155, + "learning_rate": 1.022e-05, + "loss": 0.0005, + "step": 19890 + }, + { + "epoch": 3.98, + "grad_norm": 0.0036019564140588045, + "learning_rate": 1.02e-05, + "loss": 0.0019, + "step": 19900 + }, + { + "epoch": 3.982, + "grad_norm": 0.006227910052984953, + "learning_rate": 1.018e-05, + "loss": 0.0174, + "step": 19910 + }, + { + "epoch": 3.984, + "grad_norm": 0.05088542029261589, + "learning_rate": 1.016e-05, + "loss": 0.0022, + "step": 19920 + }, + { + "epoch": 3.9859999999999998, + "grad_norm": 0.21118783950805664, + "learning_rate": 1.0140000000000001e-05, + "loss": 0.0053, + "step": 19930 + }, + { + "epoch": 3.988, + "grad_norm": 0.003003940684720874, + "learning_rate": 1.012e-05, + "loss": 0.0005, + "step": 19940 + }, + { + "epoch": 3.99, + "grad_norm": 0.012095367535948753, + "learning_rate": 1.0100000000000002e-05, + "loss": 0.0414, + "step": 19950 + }, + { + "epoch": 3.992, + "grad_norm": 0.012738720513880253, + "learning_rate": 1.008e-05, + "loss": 0.0462, + "step": 19960 + }, + { + "epoch": 3.9939999999999998, + "grad_norm": 0.04133102670311928, + "learning_rate": 1.006e-05, + "loss": 0.0019, + "step": 19970 + }, + { + "epoch": 3.996, + "grad_norm": 0.0019321141298860312, + "learning_rate": 1.004e-05, + "loss": 0.0026, + "step": 19980 + }, + { + "epoch": 3.998, + "grad_norm": 0.006852047983556986, + "learning_rate": 1.002e-05, + "loss": 0.0002, + "step": 19990 + }, + { + "epoch": 4.0, + "grad_norm": 0.0022855447605252266, + "learning_rate": 1e-05, + "loss": 0.0002, + "step": 20000 + }, + { + "epoch": 4.0, + "eval_loss": 0.012842352502048016, + "eval_runtime": 94.2048, + "eval_samples_per_second": 106.152, + "eval_steps_per_second": 13.269, + "step": 20000 + } + ], + "logging_steps": 10, + "max_steps": 25000, + "num_input_tokens_seen": 0, + "num_train_epochs": 5, + "save_steps": 500, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": false + }, + "attributes": {} + } + }, + "total_flos": 4.62403141632e+16, + "train_batch_size": 8, + "trial_name": null, + "trial_params": null +}