|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 4.99290780141844, |
|
"eval_steps": 500, |
|
"global_step": 1230, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.004052684903748734, |
|
"grad_norm": 6.599023045587286, |
|
"learning_rate": 3.2520325203252037e-07, |
|
"loss": 1.0514, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.008105369807497468, |
|
"grad_norm": 6.47445504180245, |
|
"learning_rate": 6.504065040650407e-07, |
|
"loss": 1.0362, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0121580547112462, |
|
"grad_norm": 6.686247018328671, |
|
"learning_rate": 9.75609756097561e-07, |
|
"loss": 1.0544, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.016210739614994935, |
|
"grad_norm": 6.421449402635541, |
|
"learning_rate": 1.3008130081300815e-06, |
|
"loss": 1.0401, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.020263424518743668, |
|
"grad_norm": 6.225536077141368, |
|
"learning_rate": 1.6260162601626018e-06, |
|
"loss": 1.042, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0243161094224924, |
|
"grad_norm": 6.079425532546916, |
|
"learning_rate": 1.951219512195122e-06, |
|
"loss": 1.039, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.028368794326241134, |
|
"grad_norm": 4.747162327497833, |
|
"learning_rate": 2.2764227642276426e-06, |
|
"loss": 0.9873, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.03242147922998987, |
|
"grad_norm": 4.591298946716306, |
|
"learning_rate": 2.601626016260163e-06, |
|
"loss": 1.0035, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.0364741641337386, |
|
"grad_norm": 2.8712908458609747, |
|
"learning_rate": 2.926829268292683e-06, |
|
"loss": 0.9471, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.040526849037487336, |
|
"grad_norm": 2.562130612426219, |
|
"learning_rate": 3.2520325203252037e-06, |
|
"loss": 0.9319, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.044579533941236066, |
|
"grad_norm": 2.575689379913838, |
|
"learning_rate": 3.577235772357724e-06, |
|
"loss": 0.9467, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.0486322188449848, |
|
"grad_norm": 3.7930907311939817, |
|
"learning_rate": 3.902439024390244e-06, |
|
"loss": 0.9029, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.05268490374873354, |
|
"grad_norm": 4.170067449993447, |
|
"learning_rate": 4.227642276422765e-06, |
|
"loss": 0.924, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.05673758865248227, |
|
"grad_norm": 3.965497393388768, |
|
"learning_rate": 4.552845528455285e-06, |
|
"loss": 0.9197, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.060790273556231005, |
|
"grad_norm": 3.738532012764317, |
|
"learning_rate": 4.8780487804878055e-06, |
|
"loss": 0.8887, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.06484295845997974, |
|
"grad_norm": 2.792925295998095, |
|
"learning_rate": 5.203252032520326e-06, |
|
"loss": 0.8642, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.06889564336372847, |
|
"grad_norm": 2.5039001786604347, |
|
"learning_rate": 5.528455284552846e-06, |
|
"loss": 0.839, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.0729483282674772, |
|
"grad_norm": 2.0553313011702494, |
|
"learning_rate": 5.853658536585366e-06, |
|
"loss": 0.8238, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.07700101317122594, |
|
"grad_norm": 1.5773393617325848, |
|
"learning_rate": 6.178861788617887e-06, |
|
"loss": 0.8125, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.08105369807497467, |
|
"grad_norm": 1.2593251921114146, |
|
"learning_rate": 6.504065040650407e-06, |
|
"loss": 0.7996, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.0851063829787234, |
|
"grad_norm": 1.211398822943914, |
|
"learning_rate": 6.829268292682928e-06, |
|
"loss": 0.791, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.08915906788247213, |
|
"grad_norm": 1.3023956820532772, |
|
"learning_rate": 7.154471544715448e-06, |
|
"loss": 0.7845, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.09321175278622088, |
|
"grad_norm": 1.1161869174225816, |
|
"learning_rate": 7.4796747967479676e-06, |
|
"loss": 0.7643, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.0972644376899696, |
|
"grad_norm": 1.006673189543252, |
|
"learning_rate": 7.804878048780489e-06, |
|
"loss": 0.7651, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.10131712259371833, |
|
"grad_norm": 0.893820815392647, |
|
"learning_rate": 8.130081300813009e-06, |
|
"loss": 0.7303, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.10536980749746708, |
|
"grad_norm": 0.9560473442751164, |
|
"learning_rate": 8.45528455284553e-06, |
|
"loss": 0.7368, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.1094224924012158, |
|
"grad_norm": 0.8802110889639827, |
|
"learning_rate": 8.78048780487805e-06, |
|
"loss": 0.7253, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.11347517730496454, |
|
"grad_norm": 0.748527512955968, |
|
"learning_rate": 9.10569105691057e-06, |
|
"loss": 0.7189, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.11752786220871327, |
|
"grad_norm": 0.7125428348195815, |
|
"learning_rate": 9.43089430894309e-06, |
|
"loss": 0.7207, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.12158054711246201, |
|
"grad_norm": 0.8467727891606993, |
|
"learning_rate": 9.756097560975611e-06, |
|
"loss": 0.7194, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.12563323201621074, |
|
"grad_norm": 0.8073050367246051, |
|
"learning_rate": 1.008130081300813e-05, |
|
"loss": 0.7013, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.12968591691995948, |
|
"grad_norm": 0.6270370447029, |
|
"learning_rate": 1.0406504065040652e-05, |
|
"loss": 0.7167, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.1337386018237082, |
|
"grad_norm": 0.7265413634779462, |
|
"learning_rate": 1.0731707317073172e-05, |
|
"loss": 0.7171, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.13779128672745694, |
|
"grad_norm": 0.6576559175700657, |
|
"learning_rate": 1.1056910569105692e-05, |
|
"loss": 0.6808, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.14184397163120568, |
|
"grad_norm": 0.8234127148418743, |
|
"learning_rate": 1.1382113821138213e-05, |
|
"loss": 0.6799, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.1458966565349544, |
|
"grad_norm": 0.6058143860968938, |
|
"learning_rate": 1.1707317073170731e-05, |
|
"loss": 0.6916, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.14994934143870314, |
|
"grad_norm": 0.6467038659776014, |
|
"learning_rate": 1.2032520325203254e-05, |
|
"loss": 0.6841, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.1540020263424519, |
|
"grad_norm": 0.7316541503441805, |
|
"learning_rate": 1.2357723577235774e-05, |
|
"loss": 0.678, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.1580547112462006, |
|
"grad_norm": 0.7472149382046376, |
|
"learning_rate": 1.2682926829268294e-05, |
|
"loss": 0.6889, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.16210739614994935, |
|
"grad_norm": 0.5793146399762638, |
|
"learning_rate": 1.3008130081300815e-05, |
|
"loss": 0.6842, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.1661600810536981, |
|
"grad_norm": 0.6875199051363161, |
|
"learning_rate": 1.3333333333333333e-05, |
|
"loss": 0.6668, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.1702127659574468, |
|
"grad_norm": 0.5703715776394133, |
|
"learning_rate": 1.3658536585365855e-05, |
|
"loss": 0.652, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.17426545086119555, |
|
"grad_norm": 0.5352526787481642, |
|
"learning_rate": 1.3983739837398376e-05, |
|
"loss": 0.6664, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.17831813576494426, |
|
"grad_norm": 0.6169194929882444, |
|
"learning_rate": 1.4308943089430896e-05, |
|
"loss": 0.6611, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.182370820668693, |
|
"grad_norm": 0.5580859953378566, |
|
"learning_rate": 1.4634146341463415e-05, |
|
"loss": 0.6501, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.18642350557244175, |
|
"grad_norm": 0.6224025899791772, |
|
"learning_rate": 1.4959349593495935e-05, |
|
"loss": 0.6579, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.19047619047619047, |
|
"grad_norm": 0.5381919334902069, |
|
"learning_rate": 1.528455284552846e-05, |
|
"loss": 0.6634, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.1945288753799392, |
|
"grad_norm": 0.6010567456784308, |
|
"learning_rate": 1.5609756097560978e-05, |
|
"loss": 0.6487, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.19858156028368795, |
|
"grad_norm": 0.6488990433414341, |
|
"learning_rate": 1.5934959349593496e-05, |
|
"loss": 0.6578, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.20263424518743667, |
|
"grad_norm": 0.4477246718607412, |
|
"learning_rate": 1.6260162601626018e-05, |
|
"loss": 0.6503, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.2066869300911854, |
|
"grad_norm": 0.6426477379434364, |
|
"learning_rate": 1.6585365853658537e-05, |
|
"loss": 0.6696, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.21073961499493415, |
|
"grad_norm": 0.5044801809994581, |
|
"learning_rate": 1.691056910569106e-05, |
|
"loss": 0.6517, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.21479229989868287, |
|
"grad_norm": 0.5119480455859519, |
|
"learning_rate": 1.7235772357723578e-05, |
|
"loss": 0.6511, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.2188449848024316, |
|
"grad_norm": 0.5584164405076512, |
|
"learning_rate": 1.75609756097561e-05, |
|
"loss": 0.6513, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.22289766970618036, |
|
"grad_norm": 0.4851555351275332, |
|
"learning_rate": 1.788617886178862e-05, |
|
"loss": 0.6578, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.22695035460992907, |
|
"grad_norm": 0.4791477452289817, |
|
"learning_rate": 1.821138211382114e-05, |
|
"loss": 0.6411, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.23100303951367782, |
|
"grad_norm": 0.6818115167524869, |
|
"learning_rate": 1.8536585365853663e-05, |
|
"loss": 0.6364, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.23505572441742653, |
|
"grad_norm": 0.674996775789408, |
|
"learning_rate": 1.886178861788618e-05, |
|
"loss": 0.6445, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.23910840932117527, |
|
"grad_norm": 0.5049699624546058, |
|
"learning_rate": 1.91869918699187e-05, |
|
"loss": 0.6467, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.24316109422492402, |
|
"grad_norm": 0.6910583208230778, |
|
"learning_rate": 1.9512195121951222e-05, |
|
"loss": 0.6444, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.24721377912867273, |
|
"grad_norm": 0.7937751674365281, |
|
"learning_rate": 1.983739837398374e-05, |
|
"loss": 0.6325, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.2512664640324215, |
|
"grad_norm": 0.6497295123578288, |
|
"learning_rate": 2.016260162601626e-05, |
|
"loss": 0.6362, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.2553191489361702, |
|
"grad_norm": 0.83011660343835, |
|
"learning_rate": 2.048780487804878e-05, |
|
"loss": 0.6275, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.25937183383991896, |
|
"grad_norm": 0.7993008910455739, |
|
"learning_rate": 2.0813008130081303e-05, |
|
"loss": 0.6435, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.2634245187436677, |
|
"grad_norm": 1.08783893201976, |
|
"learning_rate": 2.1138211382113822e-05, |
|
"loss": 0.6449, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.2674772036474164, |
|
"grad_norm": 0.6897874653005953, |
|
"learning_rate": 2.1463414634146344e-05, |
|
"loss": 0.6305, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.27152988855116517, |
|
"grad_norm": 0.8919102060332686, |
|
"learning_rate": 2.1788617886178863e-05, |
|
"loss": 0.6267, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.2755825734549139, |
|
"grad_norm": 0.9844431258155775, |
|
"learning_rate": 2.2113821138211385e-05, |
|
"loss": 0.6296, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.2796352583586626, |
|
"grad_norm": 0.9194076598934081, |
|
"learning_rate": 2.2439024390243907e-05, |
|
"loss": 0.6344, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.28368794326241137, |
|
"grad_norm": 1.1065466031734117, |
|
"learning_rate": 2.2764227642276426e-05, |
|
"loss": 0.6243, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.2877406281661601, |
|
"grad_norm": 0.7208673076659101, |
|
"learning_rate": 2.3089430894308948e-05, |
|
"loss": 0.6159, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.2917933130699088, |
|
"grad_norm": 0.73452231554592, |
|
"learning_rate": 2.3414634146341463e-05, |
|
"loss": 0.6083, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.29584599797365757, |
|
"grad_norm": 0.8253144357432102, |
|
"learning_rate": 2.3739837398373985e-05, |
|
"loss": 0.6276, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.2998986828774063, |
|
"grad_norm": 0.8734929655139548, |
|
"learning_rate": 2.4065040650406507e-05, |
|
"loss": 0.6129, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.303951367781155, |
|
"grad_norm": 0.9112601084025713, |
|
"learning_rate": 2.4390243902439026e-05, |
|
"loss": 0.6236, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.3080040526849038, |
|
"grad_norm": 0.8415231090703136, |
|
"learning_rate": 2.4715447154471548e-05, |
|
"loss": 0.6263, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.3120567375886525, |
|
"grad_norm": 0.8462930373145203, |
|
"learning_rate": 2.5040650406504066e-05, |
|
"loss": 0.6229, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.3161094224924012, |
|
"grad_norm": 0.8727171669136281, |
|
"learning_rate": 2.536585365853659e-05, |
|
"loss": 0.6155, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.32016210739615, |
|
"grad_norm": 0.9599251286576289, |
|
"learning_rate": 2.569105691056911e-05, |
|
"loss": 0.6137, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.3242147922998987, |
|
"grad_norm": 1.0662735894475077, |
|
"learning_rate": 2.601626016260163e-05, |
|
"loss": 0.6111, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.3282674772036474, |
|
"grad_norm": 0.7853059023523702, |
|
"learning_rate": 2.634146341463415e-05, |
|
"loss": 0.6187, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.3323201621073962, |
|
"grad_norm": 0.9795949167273307, |
|
"learning_rate": 2.6666666666666667e-05, |
|
"loss": 0.6168, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.3363728470111449, |
|
"grad_norm": 1.0445722702179447, |
|
"learning_rate": 2.699186991869919e-05, |
|
"loss": 0.6119, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.3404255319148936, |
|
"grad_norm": 0.8674808814604583, |
|
"learning_rate": 2.731707317073171e-05, |
|
"loss": 0.6123, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.3444782168186423, |
|
"grad_norm": 0.7114489673791807, |
|
"learning_rate": 2.764227642276423e-05, |
|
"loss": 0.6202, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.3485309017223911, |
|
"grad_norm": 0.7731889768192329, |
|
"learning_rate": 2.796747967479675e-05, |
|
"loss": 0.6287, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.3525835866261398, |
|
"grad_norm": 0.956648109933667, |
|
"learning_rate": 2.829268292682927e-05, |
|
"loss": 0.604, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.3566362715298885, |
|
"grad_norm": 1.0370738380771238, |
|
"learning_rate": 2.8617886178861792e-05, |
|
"loss": 0.609, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.3606889564336373, |
|
"grad_norm": 1.1777960561988736, |
|
"learning_rate": 2.8943089430894314e-05, |
|
"loss": 0.6166, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.364741641337386, |
|
"grad_norm": 1.086791362669415, |
|
"learning_rate": 2.926829268292683e-05, |
|
"loss": 0.6205, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.36879432624113473, |
|
"grad_norm": 0.9770790180691501, |
|
"learning_rate": 2.959349593495935e-05, |
|
"loss": 0.6038, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.3728470111448835, |
|
"grad_norm": 1.0327060681903977, |
|
"learning_rate": 2.991869918699187e-05, |
|
"loss": 0.5924, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.3768996960486322, |
|
"grad_norm": 1.1577643786737397, |
|
"learning_rate": 3.0243902439024392e-05, |
|
"loss": 0.5966, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.38095238095238093, |
|
"grad_norm": 0.9851717301392412, |
|
"learning_rate": 3.056910569105692e-05, |
|
"loss": 0.6025, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.3850050658561297, |
|
"grad_norm": 0.9305559134381358, |
|
"learning_rate": 3.089430894308943e-05, |
|
"loss": 0.6155, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.3890577507598784, |
|
"grad_norm": 1.0531383999376154, |
|
"learning_rate": 3.1219512195121955e-05, |
|
"loss": 0.6034, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.39311043566362713, |
|
"grad_norm": 1.4739439835432528, |
|
"learning_rate": 3.154471544715447e-05, |
|
"loss": 0.608, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.3971631205673759, |
|
"grad_norm": 0.8903811047308882, |
|
"learning_rate": 3.186991869918699e-05, |
|
"loss": 0.6009, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.4012158054711246, |
|
"grad_norm": 1.2890094312999683, |
|
"learning_rate": 3.2195121951219514e-05, |
|
"loss": 0.6145, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.40526849037487334, |
|
"grad_norm": 1.5377901588674554, |
|
"learning_rate": 3.2520325203252037e-05, |
|
"loss": 0.6165, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.4093211752786221, |
|
"grad_norm": 0.9798279967084077, |
|
"learning_rate": 3.284552845528456e-05, |
|
"loss": 0.6121, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.4133738601823708, |
|
"grad_norm": 2.374826379079598, |
|
"learning_rate": 3.3170731707317074e-05, |
|
"loss": 0.608, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.41742654508611954, |
|
"grad_norm": 1.8195762020549644, |
|
"learning_rate": 3.3495934959349596e-05, |
|
"loss": 0.607, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.4214792299898683, |
|
"grad_norm": 1.8943041468337254, |
|
"learning_rate": 3.382113821138212e-05, |
|
"loss": 0.611, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.425531914893617, |
|
"grad_norm": 1.9196628831168747, |
|
"learning_rate": 3.414634146341463e-05, |
|
"loss": 0.6119, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.42958459979736574, |
|
"grad_norm": 1.3202498996535552, |
|
"learning_rate": 3.4471544715447155e-05, |
|
"loss": 0.5882, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.4336372847011145, |
|
"grad_norm": 1.3681923413251689, |
|
"learning_rate": 3.479674796747968e-05, |
|
"loss": 0.6032, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.4376899696048632, |
|
"grad_norm": 1.4144616570974533, |
|
"learning_rate": 3.51219512195122e-05, |
|
"loss": 0.5977, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.44174265450861194, |
|
"grad_norm": 1.0429952162358156, |
|
"learning_rate": 3.544715447154472e-05, |
|
"loss": 0.6193, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.4457953394123607, |
|
"grad_norm": 1.5534020292030635, |
|
"learning_rate": 3.577235772357724e-05, |
|
"loss": 0.6133, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.44984802431610943, |
|
"grad_norm": 0.9920400286293563, |
|
"learning_rate": 3.609756097560976e-05, |
|
"loss": 0.6137, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.45390070921985815, |
|
"grad_norm": 1.7497537633064735, |
|
"learning_rate": 3.642276422764228e-05, |
|
"loss": 0.6036, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.4579533941236069, |
|
"grad_norm": 1.4500100577955872, |
|
"learning_rate": 3.67479674796748e-05, |
|
"loss": 0.6024, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.46200607902735563, |
|
"grad_norm": 1.436311058181195, |
|
"learning_rate": 3.7073170731707325e-05, |
|
"loss": 0.6028, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.46605876393110435, |
|
"grad_norm": 1.2660602143533284, |
|
"learning_rate": 3.739837398373984e-05, |
|
"loss": 0.5965, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.47011144883485306, |
|
"grad_norm": 1.183335814860972, |
|
"learning_rate": 3.772357723577236e-05, |
|
"loss": 0.6009, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.47416413373860183, |
|
"grad_norm": 1.186882558761872, |
|
"learning_rate": 3.804878048780488e-05, |
|
"loss": 0.6078, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.47821681864235055, |
|
"grad_norm": 1.2935580575767696, |
|
"learning_rate": 3.83739837398374e-05, |
|
"loss": 0.5862, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.48226950354609927, |
|
"grad_norm": 1.0182066029183403, |
|
"learning_rate": 3.869918699186992e-05, |
|
"loss": 0.5932, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.48632218844984804, |
|
"grad_norm": 1.233971548054037, |
|
"learning_rate": 3.9024390243902444e-05, |
|
"loss": 0.6064, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.49037487335359675, |
|
"grad_norm": 1.0587585632805925, |
|
"learning_rate": 3.9349593495934966e-05, |
|
"loss": 0.614, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.49442755825734547, |
|
"grad_norm": 1.026550579721245, |
|
"learning_rate": 3.967479674796748e-05, |
|
"loss": 0.5964, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.49848024316109424, |
|
"grad_norm": 1.0202621301409174, |
|
"learning_rate": 4e-05, |
|
"loss": 0.5996, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.502532928064843, |
|
"grad_norm": 0.9645823620574748, |
|
"learning_rate": 3.999991946137476e-05, |
|
"loss": 0.591, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.5065856129685917, |
|
"grad_norm": 0.9727069245390664, |
|
"learning_rate": 3.999967784614766e-05, |
|
"loss": 0.5927, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.5106382978723404, |
|
"grad_norm": 0.9575106494245289, |
|
"learning_rate": 3.9999275156264656e-05, |
|
"loss": 0.5841, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.5146909827760892, |
|
"grad_norm": 0.9244278735699253, |
|
"learning_rate": 3.999871139496895e-05, |
|
"loss": 0.5865, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.5187436676798379, |
|
"grad_norm": 0.8645848502796644, |
|
"learning_rate": 3.9997986566800995e-05, |
|
"loss": 0.5965, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.5227963525835866, |
|
"grad_norm": 0.7987115788482113, |
|
"learning_rate": 3.999710067759846e-05, |
|
"loss": 0.6002, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.5268490374873354, |
|
"grad_norm": 0.8484821578185601, |
|
"learning_rate": 3.999605373449617e-05, |
|
"loss": 0.5954, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.5309017223910841, |
|
"grad_norm": 0.971580608401574, |
|
"learning_rate": 3.9994845745926075e-05, |
|
"loss": 0.5927, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.5349544072948328, |
|
"grad_norm": 0.9747622535790835, |
|
"learning_rate": 3.999347672161713e-05, |
|
"loss": 0.5905, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.5390070921985816, |
|
"grad_norm": 1.1166476115686765, |
|
"learning_rate": 3.999194667259528e-05, |
|
"loss": 0.6016, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.5430597771023303, |
|
"grad_norm": 0.6761046565653213, |
|
"learning_rate": 3.999025561118334e-05, |
|
"loss": 0.5908, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.547112462006079, |
|
"grad_norm": 0.6839236271714765, |
|
"learning_rate": 3.998840355100086e-05, |
|
"loss": 0.5944, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.5511651469098278, |
|
"grad_norm": 0.7029152980988005, |
|
"learning_rate": 3.998639050696409e-05, |
|
"loss": 0.6005, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.5552178318135765, |
|
"grad_norm": 0.6258283013178816, |
|
"learning_rate": 3.998421649528582e-05, |
|
"loss": 0.5913, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.5592705167173252, |
|
"grad_norm": 0.9719197743275402, |
|
"learning_rate": 3.9981881533475234e-05, |
|
"loss": 0.5843, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.563323201621074, |
|
"grad_norm": 0.7244697798564573, |
|
"learning_rate": 3.997938564033779e-05, |
|
"loss": 0.5927, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.5673758865248227, |
|
"grad_norm": 0.6116455492566624, |
|
"learning_rate": 3.9976728835975064e-05, |
|
"loss": 0.5961, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.5714285714285714, |
|
"grad_norm": 0.5597555162335052, |
|
"learning_rate": 3.9973911141784605e-05, |
|
"loss": 0.5854, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.5754812563323202, |
|
"grad_norm": 0.6355188391656353, |
|
"learning_rate": 3.997093258045973e-05, |
|
"loss": 0.6044, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.5795339412360689, |
|
"grad_norm": 0.8091179535502104, |
|
"learning_rate": 3.996779317598936e-05, |
|
"loss": 0.578, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.5835866261398176, |
|
"grad_norm": 1.3524064626747305, |
|
"learning_rate": 3.996449295365782e-05, |
|
"loss": 0.6639, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.5876393110435664, |
|
"grad_norm": 0.6144276860924925, |
|
"learning_rate": 3.996103194004467e-05, |
|
"loss": 0.5946, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.5916919959473151, |
|
"grad_norm": 0.8900594482542501, |
|
"learning_rate": 3.995741016302441e-05, |
|
"loss": 0.5927, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.5957446808510638, |
|
"grad_norm": 1.1307082657573655, |
|
"learning_rate": 3.9953627651766364e-05, |
|
"loss": 0.6019, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.5997973657548126, |
|
"grad_norm": 0.6093630761114724, |
|
"learning_rate": 3.9949684436734325e-05, |
|
"loss": 0.5832, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.6038500506585613, |
|
"grad_norm": 0.8927507499625856, |
|
"learning_rate": 3.994558054968643e-05, |
|
"loss": 0.5923, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.60790273556231, |
|
"grad_norm": 0.9044347991443313, |
|
"learning_rate": 3.994131602367481e-05, |
|
"loss": 0.6008, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.6119554204660588, |
|
"grad_norm": 0.6889495233948225, |
|
"learning_rate": 3.9936890893045376e-05, |
|
"loss": 0.5769, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.6160081053698075, |
|
"grad_norm": 0.5636111290380746, |
|
"learning_rate": 3.993230519343752e-05, |
|
"loss": 0.5864, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.6200607902735562, |
|
"grad_norm": 0.6063529038504148, |
|
"learning_rate": 3.992755896178383e-05, |
|
"loss": 0.5976, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.624113475177305, |
|
"grad_norm": 0.5962853484434704, |
|
"learning_rate": 3.992265223630981e-05, |
|
"loss": 0.5981, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.6281661600810537, |
|
"grad_norm": 0.594526735068891, |
|
"learning_rate": 3.991758505653355e-05, |
|
"loss": 0.5989, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.6322188449848024, |
|
"grad_norm": 0.6732444351353275, |
|
"learning_rate": 3.991235746326543e-05, |
|
"loss": 0.5866, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.6362715298885512, |
|
"grad_norm": 0.692304303556478, |
|
"learning_rate": 3.9906969498607745e-05, |
|
"loss": 0.5918, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.6403242147923, |
|
"grad_norm": 0.6637571415045488, |
|
"learning_rate": 3.990142120595444e-05, |
|
"loss": 0.5884, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.6443768996960486, |
|
"grad_norm": 0.7688867999656185, |
|
"learning_rate": 3.98957126299907e-05, |
|
"loss": 0.5869, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.6484295845997974, |
|
"grad_norm": 0.7389578037108177, |
|
"learning_rate": 3.9889843816692596e-05, |
|
"loss": 0.5933, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.6524822695035462, |
|
"grad_norm": 0.6443966658312461, |
|
"learning_rate": 3.9883814813326766e-05, |
|
"loss": 0.5819, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.6565349544072948, |
|
"grad_norm": 0.7022403859302037, |
|
"learning_rate": 3.9877625668449956e-05, |
|
"loss": 0.5956, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.6605876393110436, |
|
"grad_norm": 0.6605782438262765, |
|
"learning_rate": 3.98712764319087e-05, |
|
"loss": 0.5794, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.6646403242147924, |
|
"grad_norm": 0.5956978402705035, |
|
"learning_rate": 3.9864767154838864e-05, |
|
"loss": 0.5855, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.668693009118541, |
|
"grad_norm": 0.5830874156149116, |
|
"learning_rate": 3.9858097889665277e-05, |
|
"loss": 0.593, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.6727456940222898, |
|
"grad_norm": 0.629907908872092, |
|
"learning_rate": 3.985126869010129e-05, |
|
"loss": 0.5861, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.6767983789260384, |
|
"grad_norm": 0.6851671018144985, |
|
"learning_rate": 3.984427961114833e-05, |
|
"loss": 0.5746, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.6808510638297872, |
|
"grad_norm": 0.5650956231842051, |
|
"learning_rate": 3.9837130709095475e-05, |
|
"loss": 0.6105, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.684903748733536, |
|
"grad_norm": 0.51559674706075, |
|
"learning_rate": 3.982982204151901e-05, |
|
"loss": 0.5921, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.6889564336372846, |
|
"grad_norm": 0.5385953173582195, |
|
"learning_rate": 3.982235366728193e-05, |
|
"loss": 0.5903, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.6930091185410334, |
|
"grad_norm": 0.595519385692879, |
|
"learning_rate": 3.9814725646533505e-05, |
|
"loss": 0.5723, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.6970618034447822, |
|
"grad_norm": 0.6876881404617907, |
|
"learning_rate": 3.9806938040708746e-05, |
|
"loss": 0.5759, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.7011144883485309, |
|
"grad_norm": 0.8264617094791407, |
|
"learning_rate": 3.9798990912527976e-05, |
|
"loss": 0.575, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.7051671732522796, |
|
"grad_norm": 0.5893607996668139, |
|
"learning_rate": 3.979088432599627e-05, |
|
"loss": 0.5775, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.7092198581560284, |
|
"grad_norm": 0.4919074691226593, |
|
"learning_rate": 3.9782618346402964e-05, |
|
"loss": 0.5583, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.713272543059777, |
|
"grad_norm": 0.6457413301136418, |
|
"learning_rate": 3.977419304032111e-05, |
|
"loss": 0.5829, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.7173252279635258, |
|
"grad_norm": 0.7887202287284762, |
|
"learning_rate": 3.976560847560697e-05, |
|
"loss": 0.588, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.7213779128672746, |
|
"grad_norm": 0.6236837362263904, |
|
"learning_rate": 3.9756864721399456e-05, |
|
"loss": 0.5762, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.7254305977710233, |
|
"grad_norm": 0.7917934414327535, |
|
"learning_rate": 3.974796184811956e-05, |
|
"loss": 0.578, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.729483282674772, |
|
"grad_norm": 0.7864273354959346, |
|
"learning_rate": 3.973889992746979e-05, |
|
"loss": 0.576, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.7335359675785208, |
|
"grad_norm": 0.47702577115895367, |
|
"learning_rate": 3.972967903243361e-05, |
|
"loss": 0.574, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.7375886524822695, |
|
"grad_norm": 0.6016512243297031, |
|
"learning_rate": 3.972029923727486e-05, |
|
"loss": 0.5781, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.7416413373860182, |
|
"grad_norm": 3.613647139307396, |
|
"learning_rate": 3.971076061753709e-05, |
|
"loss": 0.6272, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.745694022289767, |
|
"grad_norm": 0.8424059579908663, |
|
"learning_rate": 3.9701063250043066e-05, |
|
"loss": 0.5804, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.7497467071935157, |
|
"grad_norm": 0.7171898071813968, |
|
"learning_rate": 3.969120721289402e-05, |
|
"loss": 0.5734, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.7537993920972644, |
|
"grad_norm": 0.6430828612704044, |
|
"learning_rate": 3.9681192585469146e-05, |
|
"loss": 0.5759, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.7578520770010132, |
|
"grad_norm": 0.5411949543263597, |
|
"learning_rate": 3.9671019448424865e-05, |
|
"loss": 0.5772, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.7619047619047619, |
|
"grad_norm": 0.6629915907028012, |
|
"learning_rate": 3.966068788369422e-05, |
|
"loss": 0.5696, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.7659574468085106, |
|
"grad_norm": 0.6197495345472117, |
|
"learning_rate": 3.965019797448622e-05, |
|
"loss": 0.5804, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.7700101317122594, |
|
"grad_norm": 0.43877020272001177, |
|
"learning_rate": 3.963954980528515e-05, |
|
"loss": 0.5816, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.7740628166160081, |
|
"grad_norm": 0.6372327284226272, |
|
"learning_rate": 3.9628743461849905e-05, |
|
"loss": 0.5725, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.7781155015197568, |
|
"grad_norm": 0.5616348893638621, |
|
"learning_rate": 3.961777903121329e-05, |
|
"loss": 0.5799, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.7821681864235056, |
|
"grad_norm": 0.538090864506061, |
|
"learning_rate": 3.960665660168131e-05, |
|
"loss": 0.5825, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.7862208713272543, |
|
"grad_norm": 0.6543667433759188, |
|
"learning_rate": 3.9595376262832485e-05, |
|
"loss": 0.5902, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.790273556231003, |
|
"grad_norm": 0.5569867081731052, |
|
"learning_rate": 3.9583938105517127e-05, |
|
"loss": 0.5873, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.7943262411347518, |
|
"grad_norm": 0.5146474726912704, |
|
"learning_rate": 3.957234222185657e-05, |
|
"loss": 0.5687, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.7983789260385005, |
|
"grad_norm": 0.6075094664699849, |
|
"learning_rate": 3.9560588705242474e-05, |
|
"loss": 0.5751, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.8024316109422492, |
|
"grad_norm": 0.5592164823460768, |
|
"learning_rate": 3.954867765033605e-05, |
|
"loss": 0.5628, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.806484295845998, |
|
"grad_norm": 0.6269616882516431, |
|
"learning_rate": 3.953660915306728e-05, |
|
"loss": 0.5618, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.8105369807497467, |
|
"grad_norm": 0.49159093632367734, |
|
"learning_rate": 3.952438331063419e-05, |
|
"loss": 0.5672, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.8145896656534954, |
|
"grad_norm": 0.5412141385890064, |
|
"learning_rate": 3.951200022150205e-05, |
|
"loss": 0.5526, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.8186423505572442, |
|
"grad_norm": 0.5523680647526468, |
|
"learning_rate": 3.949945998540253e-05, |
|
"loss": 0.5738, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.8226950354609929, |
|
"grad_norm": 0.5209321699786994, |
|
"learning_rate": 3.9486762703332993e-05, |
|
"loss": 0.5768, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.8267477203647416, |
|
"grad_norm": 0.4951370311494425, |
|
"learning_rate": 3.947390847755559e-05, |
|
"loss": 0.57, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.8308004052684904, |
|
"grad_norm": 0.625356227016392, |
|
"learning_rate": 3.946089741159648e-05, |
|
"loss": 0.5533, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.8348530901722391, |
|
"grad_norm": 0.5991522110754549, |
|
"learning_rate": 3.944772961024501e-05, |
|
"loss": 0.5625, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.8389057750759878, |
|
"grad_norm": 0.470587268822535, |
|
"learning_rate": 3.943440517955285e-05, |
|
"loss": 0.5822, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.8429584599797366, |
|
"grad_norm": 0.6911532138154093, |
|
"learning_rate": 3.9420924226833126e-05, |
|
"loss": 0.5888, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.8470111448834853, |
|
"grad_norm": 0.630396527076442, |
|
"learning_rate": 3.9407286860659566e-05, |
|
"loss": 0.5753, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.851063829787234, |
|
"grad_norm": 0.5508056236682327, |
|
"learning_rate": 3.9393493190865657e-05, |
|
"loss": 0.5563, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.8551165146909828, |
|
"grad_norm": 0.678269742132698, |
|
"learning_rate": 3.937954332854371e-05, |
|
"loss": 0.5547, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.8591691995947315, |
|
"grad_norm": 0.6585364970834816, |
|
"learning_rate": 3.9365437386044016e-05, |
|
"loss": 0.5739, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.8632218844984803, |
|
"grad_norm": 0.4660216266936489, |
|
"learning_rate": 3.935117547697387e-05, |
|
"loss": 0.5817, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.867274569402229, |
|
"grad_norm": 0.5923321720157314, |
|
"learning_rate": 3.933675771619675e-05, |
|
"loss": 0.569, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.8713272543059777, |
|
"grad_norm": 0.5694789946119627, |
|
"learning_rate": 3.932218421983131e-05, |
|
"loss": 0.5746, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.8753799392097265, |
|
"grad_norm": 0.5569276407724351, |
|
"learning_rate": 3.9307455105250484e-05, |
|
"loss": 0.5436, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.8794326241134752, |
|
"grad_norm": 0.4820247724431898, |
|
"learning_rate": 3.929257049108054e-05, |
|
"loss": 0.581, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.8834853090172239, |
|
"grad_norm": 0.47948368383095985, |
|
"learning_rate": 3.927753049720011e-05, |
|
"loss": 0.5687, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.8875379939209727, |
|
"grad_norm": 0.6066256958719515, |
|
"learning_rate": 3.9262335244739234e-05, |
|
"loss": 0.573, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.8915906788247214, |
|
"grad_norm": 0.6211350059735337, |
|
"learning_rate": 3.92469848560784e-05, |
|
"loss": 0.5804, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.8956433637284701, |
|
"grad_norm": 0.5206410858855138, |
|
"learning_rate": 3.923147945484751e-05, |
|
"loss": 0.5648, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.8996960486322189, |
|
"grad_norm": 0.42830071517698765, |
|
"learning_rate": 3.9215819165924956e-05, |
|
"loss": 0.5603, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.9037487335359676, |
|
"grad_norm": 0.41200542849521593, |
|
"learning_rate": 3.920000411543654e-05, |
|
"loss": 0.5651, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.9078014184397163, |
|
"grad_norm": 0.4290409268364959, |
|
"learning_rate": 3.9184034430754495e-05, |
|
"loss": 0.565, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.9118541033434651, |
|
"grad_norm": 0.4405104643499867, |
|
"learning_rate": 3.916791024049648e-05, |
|
"loss": 0.5632, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.9159067882472138, |
|
"grad_norm": 0.42779429421192805, |
|
"learning_rate": 3.91516316745245e-05, |
|
"loss": 0.5613, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.9199594731509625, |
|
"grad_norm": 0.4206017277162419, |
|
"learning_rate": 3.913519886394389e-05, |
|
"loss": 0.5753, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.9240121580547113, |
|
"grad_norm": 0.4582584226329624, |
|
"learning_rate": 3.911861194110225e-05, |
|
"loss": 0.5742, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.92806484295846, |
|
"grad_norm": 0.4800620788616759, |
|
"learning_rate": 3.910187103958837e-05, |
|
"loss": 0.5665, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.9321175278622087, |
|
"grad_norm": 0.5245292915572244, |
|
"learning_rate": 3.908497629423117e-05, |
|
"loss": 0.5757, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.9361702127659575, |
|
"grad_norm": 0.47694404335999496, |
|
"learning_rate": 3.9067927841098614e-05, |
|
"loss": 0.5649, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.9402228976697061, |
|
"grad_norm": 0.5225235646206117, |
|
"learning_rate": 3.9050725817496594e-05, |
|
"loss": 0.5697, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.9442755825734549, |
|
"grad_norm": 0.5304360342225947, |
|
"learning_rate": 3.9033370361967844e-05, |
|
"loss": 0.5702, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.9483282674772037, |
|
"grad_norm": 0.5091531550627542, |
|
"learning_rate": 3.901586161429081e-05, |
|
"loss": 0.5696, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.9523809523809523, |
|
"grad_norm": 0.5719550465702286, |
|
"learning_rate": 3.8998199715478545e-05, |
|
"loss": 0.5663, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.9564336372847011, |
|
"grad_norm": 0.70978299490918, |
|
"learning_rate": 3.8980384807777564e-05, |
|
"loss": 0.5736, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.9604863221884499, |
|
"grad_norm": 0.4549419224132296, |
|
"learning_rate": 3.896241703466667e-05, |
|
"loss": 0.5723, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.9645390070921985, |
|
"grad_norm": 0.5712759760236225, |
|
"learning_rate": 3.894429654085585e-05, |
|
"loss": 0.567, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.9685916919959473, |
|
"grad_norm": 0.525829280693854, |
|
"learning_rate": 3.892602347228505e-05, |
|
"loss": 0.555, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.9726443768996961, |
|
"grad_norm": 0.4739037818682846, |
|
"learning_rate": 3.890759797612307e-05, |
|
"loss": 0.556, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.9766970618034447, |
|
"grad_norm": 0.617108641706374, |
|
"learning_rate": 3.888902020076632e-05, |
|
"loss": 0.5656, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.9807497467071935, |
|
"grad_norm": 0.4781533838474172, |
|
"learning_rate": 3.887029029583764e-05, |
|
"loss": 0.5753, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.9848024316109423, |
|
"grad_norm": 0.42492821789126733, |
|
"learning_rate": 3.8851408412185125e-05, |
|
"loss": 0.5644, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.9888551165146909, |
|
"grad_norm": 0.48867856629364304, |
|
"learning_rate": 3.8832374701880855e-05, |
|
"loss": 0.5503, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.9929078014184397, |
|
"grad_norm": 0.4492864320443739, |
|
"learning_rate": 3.881318931821972e-05, |
|
"loss": 0.5727, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.9969604863221885, |
|
"grad_norm": 0.4713016518803041, |
|
"learning_rate": 3.879385241571817e-05, |
|
"loss": 0.5711, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 1.0030395136778116, |
|
"grad_norm": 0.557748540238745, |
|
"learning_rate": 3.8774364150112955e-05, |
|
"loss": 0.5424, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 1.0070921985815602, |
|
"grad_norm": 0.4621148175796167, |
|
"learning_rate": 3.8754724678359884e-05, |
|
"loss": 0.5231, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 1.011144883485309, |
|
"grad_norm": 0.6000954945612695, |
|
"learning_rate": 3.873493415863256e-05, |
|
"loss": 0.5134, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 1.0151975683890577, |
|
"grad_norm": 0.5535341535106301, |
|
"learning_rate": 3.871499275032111e-05, |
|
"loss": 0.5215, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 1.0192502532928065, |
|
"grad_norm": 0.5322754355179768, |
|
"learning_rate": 3.869490061403091e-05, |
|
"loss": 0.5264, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 1.0233029381965553, |
|
"grad_norm": 0.5582598034606526, |
|
"learning_rate": 3.867465791158124e-05, |
|
"loss": 0.5041, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 1.027355623100304, |
|
"grad_norm": 0.4871602974793124, |
|
"learning_rate": 3.865426480600407e-05, |
|
"loss": 0.5165, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 1.0314083080040526, |
|
"grad_norm": 0.45220811140253986, |
|
"learning_rate": 3.863372146154264e-05, |
|
"loss": 0.494, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 1.0354609929078014, |
|
"grad_norm": 0.5781802420159897, |
|
"learning_rate": 3.861302804365024e-05, |
|
"loss": 0.5204, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 1.0395136778115501, |
|
"grad_norm": 0.49766362648363055, |
|
"learning_rate": 3.85921847189888e-05, |
|
"loss": 0.5194, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 1.043566362715299, |
|
"grad_norm": 0.7462236846176246, |
|
"learning_rate": 3.85711916554276e-05, |
|
"loss": 0.5143, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 1.0476190476190477, |
|
"grad_norm": 0.5299219145348918, |
|
"learning_rate": 3.85500490220419e-05, |
|
"loss": 0.5255, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 1.0516717325227964, |
|
"grad_norm": 0.6539443857614234, |
|
"learning_rate": 3.852875698911154e-05, |
|
"loss": 0.5098, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 1.055724417426545, |
|
"grad_norm": 0.6274335444857269, |
|
"learning_rate": 3.850731572811963e-05, |
|
"loss": 0.5156, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 1.0597771023302938, |
|
"grad_norm": 0.5194345553113586, |
|
"learning_rate": 3.848572541175116e-05, |
|
"loss": 0.527, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 1.0638297872340425, |
|
"grad_norm": 0.46848556483468673, |
|
"learning_rate": 3.846398621389154e-05, |
|
"loss": 0.5111, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 1.0678824721377913, |
|
"grad_norm": 0.49878842850120786, |
|
"learning_rate": 3.84420983096253e-05, |
|
"loss": 0.536, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 1.07193515704154, |
|
"grad_norm": 0.6059383530690796, |
|
"learning_rate": 3.8420061875234606e-05, |
|
"loss": 0.5249, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 1.0759878419452888, |
|
"grad_norm": 0.5790965591868338, |
|
"learning_rate": 3.839787708819787e-05, |
|
"loss": 0.5153, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 1.0800405268490374, |
|
"grad_norm": 0.5563861248066765, |
|
"learning_rate": 3.8375544127188325e-05, |
|
"loss": 0.5205, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 1.0840932117527862, |
|
"grad_norm": 0.46485524046621957, |
|
"learning_rate": 3.8353063172072564e-05, |
|
"loss": 0.5222, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 1.088145896656535, |
|
"grad_norm": 0.6112401445203366, |
|
"learning_rate": 3.8330434403909105e-05, |
|
"loss": 0.5027, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 1.0921985815602837, |
|
"grad_norm": 0.6391308635690197, |
|
"learning_rate": 3.8307658004946934e-05, |
|
"loss": 0.5111, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 1.0962512664640325, |
|
"grad_norm": 0.47946903579815237, |
|
"learning_rate": 3.8284734158624046e-05, |
|
"loss": 0.5165, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 1.1003039513677813, |
|
"grad_norm": 0.5023507479940311, |
|
"learning_rate": 3.826166304956594e-05, |
|
"loss": 0.5179, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 1.1043566362715298, |
|
"grad_norm": 0.5185841466978587, |
|
"learning_rate": 3.8238444863584164e-05, |
|
"loss": 0.522, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 1.1084093211752786, |
|
"grad_norm": 0.4517335732324085, |
|
"learning_rate": 3.821507978767479e-05, |
|
"loss": 0.5142, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 1.1124620060790273, |
|
"grad_norm": 0.46604263922948747, |
|
"learning_rate": 3.819156801001693e-05, |
|
"loss": 0.5001, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 1.1165146909827761, |
|
"grad_norm": 0.5147897257218349, |
|
"learning_rate": 3.816790971997121e-05, |
|
"loss": 0.5155, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 1.1205673758865249, |
|
"grad_norm": 0.4254210258903041, |
|
"learning_rate": 3.8144105108078246e-05, |
|
"loss": 0.5156, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 1.1246200607902737, |
|
"grad_norm": 0.5500677061121733, |
|
"learning_rate": 3.81201543660571e-05, |
|
"loss": 0.5038, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 1.1286727456940222, |
|
"grad_norm": 0.5780053780820302, |
|
"learning_rate": 3.809605768680377e-05, |
|
"loss": 0.5303, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 1.132725430597771, |
|
"grad_norm": 0.48038898918683315, |
|
"learning_rate": 3.807181526438958e-05, |
|
"loss": 0.4946, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 1.1367781155015197, |
|
"grad_norm": 0.4737413153652073, |
|
"learning_rate": 3.8047427294059697e-05, |
|
"loss": 0.5096, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 1.1408308004052685, |
|
"grad_norm": 0.6956302709338007, |
|
"learning_rate": 3.802289397223145e-05, |
|
"loss": 0.5313, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 1.1448834853090173, |
|
"grad_norm": 0.5529285979675868, |
|
"learning_rate": 3.7998215496492854e-05, |
|
"loss": 0.5103, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 1.148936170212766, |
|
"grad_norm": 0.4784796828354979, |
|
"learning_rate": 3.797339206560096e-05, |
|
"loss": 0.4977, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 1.1529888551165146, |
|
"grad_norm": 0.6014494635914271, |
|
"learning_rate": 3.794842387948027e-05, |
|
"loss": 0.5375, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 1.1570415400202634, |
|
"grad_norm": 0.5726868670007542, |
|
"learning_rate": 3.7923311139221114e-05, |
|
"loss": 0.5235, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 1.1610942249240122, |
|
"grad_norm": 0.526775308165516, |
|
"learning_rate": 3.7898054047078054e-05, |
|
"loss": 0.5084, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 1.165146909827761, |
|
"grad_norm": 0.5666402167707913, |
|
"learning_rate": 3.787265280646825e-05, |
|
"loss": 0.5303, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 1.1691995947315097, |
|
"grad_norm": 0.41073723547254526, |
|
"learning_rate": 3.7847107621969786e-05, |
|
"loss": 0.52, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 1.1732522796352582, |
|
"grad_norm": 0.47841656699954654, |
|
"learning_rate": 3.7821418699320064e-05, |
|
"loss": 0.5107, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 1.177304964539007, |
|
"grad_norm": 0.4954360249327025, |
|
"learning_rate": 3.7795586245414145e-05, |
|
"loss": 0.4967, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 1.1813576494427558, |
|
"grad_norm": 0.4943642634271794, |
|
"learning_rate": 3.776961046830306e-05, |
|
"loss": 0.5206, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 1.1854103343465046, |
|
"grad_norm": 0.4902327515935834, |
|
"learning_rate": 3.774349157719215e-05, |
|
"loss": 0.5156, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 1.1894630192502533, |
|
"grad_norm": 0.4757190487796499, |
|
"learning_rate": 3.7717229782439365e-05, |
|
"loss": 0.52, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 1.193515704154002, |
|
"grad_norm": 0.44536843193264514, |
|
"learning_rate": 3.769082529555359e-05, |
|
"loss": 0.5373, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 1.1975683890577509, |
|
"grad_norm": 0.4467646110528696, |
|
"learning_rate": 3.766427832919294e-05, |
|
"loss": 0.4917, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 1.2016210739614994, |
|
"grad_norm": 0.47799260112851605, |
|
"learning_rate": 3.7637589097163024e-05, |
|
"loss": 0.5166, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 1.2056737588652482, |
|
"grad_norm": 0.4735206304259062, |
|
"learning_rate": 3.761075781441526e-05, |
|
"loss": 0.5146, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 1.209726443768997, |
|
"grad_norm": 0.45005909508358527, |
|
"learning_rate": 3.75837846970451e-05, |
|
"loss": 0.5383, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 1.2137791286727457, |
|
"grad_norm": 0.5473442276927805, |
|
"learning_rate": 3.755666996229032e-05, |
|
"loss": 0.5077, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 1.2178318135764945, |
|
"grad_norm": 0.4609985977708474, |
|
"learning_rate": 3.752941382852927e-05, |
|
"loss": 0.541, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 1.221884498480243, |
|
"grad_norm": 0.4898295955606378, |
|
"learning_rate": 3.7502016515279115e-05, |
|
"loss": 0.5231, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 1.2259371833839918, |
|
"grad_norm": 0.48905209617225276, |
|
"learning_rate": 3.7474478243194043e-05, |
|
"loss": 0.497, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 1.2299898682877406, |
|
"grad_norm": 0.5177014980600935, |
|
"learning_rate": 3.744679923406351e-05, |
|
"loss": 0.4971, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 1.2340425531914894, |
|
"grad_norm": 0.462703001863924, |
|
"learning_rate": 3.741897971081043e-05, |
|
"loss": 0.504, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 1.2380952380952381, |
|
"grad_norm": 0.49446811749889646, |
|
"learning_rate": 3.739101989748946e-05, |
|
"loss": 0.5001, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 1.242147922998987, |
|
"grad_norm": 0.46788869282102064, |
|
"learning_rate": 3.7362920019285066e-05, |
|
"loss": 0.5289, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 1.2462006079027357, |
|
"grad_norm": 0.5216593834012441, |
|
"learning_rate": 3.73346803025098e-05, |
|
"loss": 0.5269, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 1.2502532928064842, |
|
"grad_norm": 0.3844022698212344, |
|
"learning_rate": 3.730630097460247e-05, |
|
"loss": 0.5085, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 1.254305977710233, |
|
"grad_norm": 0.5163509112687578, |
|
"learning_rate": 3.727778226412628e-05, |
|
"loss": 0.5086, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 1.2583586626139818, |
|
"grad_norm": 0.5400855629158948, |
|
"learning_rate": 3.7249124400767006e-05, |
|
"loss": 0.5215, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 1.2624113475177305, |
|
"grad_norm": 0.36928789401375145, |
|
"learning_rate": 3.722032761533114e-05, |
|
"loss": 0.5078, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 1.2664640324214793, |
|
"grad_norm": 0.49916489726792845, |
|
"learning_rate": 3.719139213974403e-05, |
|
"loss": 0.5268, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 1.2705167173252279, |
|
"grad_norm": 0.43250673810166007, |
|
"learning_rate": 3.7162318207048006e-05, |
|
"loss": 0.5081, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 1.2745694022289766, |
|
"grad_norm": 0.3641961668596623, |
|
"learning_rate": 3.713310605140055e-05, |
|
"loss": 0.5157, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 1.2786220871327254, |
|
"grad_norm": 0.4708017584884893, |
|
"learning_rate": 3.710375590807233e-05, |
|
"loss": 0.5239, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 1.2826747720364742, |
|
"grad_norm": 0.4469251197502781, |
|
"learning_rate": 3.7074268013445365e-05, |
|
"loss": 0.5191, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 1.286727456940223, |
|
"grad_norm": 0.4170792540276034, |
|
"learning_rate": 3.7044642605011114e-05, |
|
"loss": 0.5271, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 1.2907801418439715, |
|
"grad_norm": 0.8326916771854185, |
|
"learning_rate": 3.701487992136854e-05, |
|
"loss": 0.5223, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 1.2948328267477205, |
|
"grad_norm": 0.38862422748363334, |
|
"learning_rate": 3.69849802022222e-05, |
|
"loss": 0.5254, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 1.298885511651469, |
|
"grad_norm": 1.263726522086109, |
|
"learning_rate": 3.6954943688380334e-05, |
|
"loss": 0.5264, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 1.3029381965552178, |
|
"grad_norm": 0.39284217743353667, |
|
"learning_rate": 3.692477062175289e-05, |
|
"loss": 0.5009, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 1.3069908814589666, |
|
"grad_norm": 0.5596339534907246, |
|
"learning_rate": 3.689446124534958e-05, |
|
"loss": 0.5079, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 1.3110435663627154, |
|
"grad_norm": 0.47488138701711996, |
|
"learning_rate": 3.686401580327799e-05, |
|
"loss": 0.5179, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 1.3150962512664641, |
|
"grad_norm": 0.42193105336906295, |
|
"learning_rate": 3.683343454074149e-05, |
|
"loss": 0.5112, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 1.3191489361702127, |
|
"grad_norm": 0.41714536262216384, |
|
"learning_rate": 3.6802717704037386e-05, |
|
"loss": 0.5359, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 1.3232016210739614, |
|
"grad_norm": 0.4523953322512892, |
|
"learning_rate": 3.6771865540554855e-05, |
|
"loss": 0.5164, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 1.3272543059777102, |
|
"grad_norm": 0.5124556975023729, |
|
"learning_rate": 3.674087829877297e-05, |
|
"loss": 0.5295, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 1.331306990881459, |
|
"grad_norm": 0.5163116635384236, |
|
"learning_rate": 3.6709756228258735e-05, |
|
"loss": 0.5229, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 1.3353596757852078, |
|
"grad_norm": 0.4757158040810718, |
|
"learning_rate": 3.667849957966501e-05, |
|
"loss": 0.5158, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 1.3394123606889563, |
|
"grad_norm": 0.4247555418435866, |
|
"learning_rate": 3.6647108604728546e-05, |
|
"loss": 0.5168, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 1.3434650455927053, |
|
"grad_norm": 0.5571299450683076, |
|
"learning_rate": 3.661558355626795e-05, |
|
"loss": 0.5192, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 1.3475177304964538, |
|
"grad_norm": 0.5501084967380048, |
|
"learning_rate": 3.658392468818163e-05, |
|
"loss": 0.5299, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 1.3515704154002026, |
|
"grad_norm": 0.4472445692430267, |
|
"learning_rate": 3.655213225544574e-05, |
|
"loss": 0.5086, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 1.3556231003039514, |
|
"grad_norm": 0.41696880674648146, |
|
"learning_rate": 3.652020651411218e-05, |
|
"loss": 0.5273, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 1.3596757852077002, |
|
"grad_norm": 0.4019100709648588, |
|
"learning_rate": 3.6488147721306474e-05, |
|
"loss": 0.5153, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 1.363728470111449, |
|
"grad_norm": 0.5436194254852269, |
|
"learning_rate": 3.645595613522574e-05, |
|
"loss": 0.506, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 1.3677811550151975, |
|
"grad_norm": 0.46236889571111445, |
|
"learning_rate": 3.642363201513657e-05, |
|
"loss": 0.5052, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 1.3718338399189463, |
|
"grad_norm": 0.4903359934637521, |
|
"learning_rate": 3.6391175621373006e-05, |
|
"loss": 0.515, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 1.375886524822695, |
|
"grad_norm": 0.4995139536483641, |
|
"learning_rate": 3.6358587215334355e-05, |
|
"loss": 0.4996, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 1.3799392097264438, |
|
"grad_norm": 0.50836386237976, |
|
"learning_rate": 3.632586705948318e-05, |
|
"loss": 0.5174, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 1.3839918946301926, |
|
"grad_norm": 0.5838425279381627, |
|
"learning_rate": 3.629301541734311e-05, |
|
"loss": 0.5068, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 1.3880445795339411, |
|
"grad_norm": 0.4779767665462759, |
|
"learning_rate": 3.626003255349676e-05, |
|
"loss": 0.5146, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 1.39209726443769, |
|
"grad_norm": 0.6312449403964024, |
|
"learning_rate": 3.622691873358357e-05, |
|
"loss": 0.5017, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 1.3961499493414387, |
|
"grad_norm": 0.6777622678473971, |
|
"learning_rate": 3.61936742242977e-05, |
|
"loss": 0.526, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 1.4002026342451874, |
|
"grad_norm": 0.5438415883023792, |
|
"learning_rate": 3.6160299293385864e-05, |
|
"loss": 0.5111, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 1.4042553191489362, |
|
"grad_norm": 0.6023331337036119, |
|
"learning_rate": 3.612679420964516e-05, |
|
"loss": 0.5092, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 1.408308004052685, |
|
"grad_norm": 0.6440575479298628, |
|
"learning_rate": 3.609315924292092e-05, |
|
"loss": 0.5015, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 1.4123606889564337, |
|
"grad_norm": 0.36807012829611774, |
|
"learning_rate": 3.6059394664104554e-05, |
|
"loss": 0.5132, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 1.4164133738601823, |
|
"grad_norm": 0.5321152648650316, |
|
"learning_rate": 3.602550074513133e-05, |
|
"loss": 0.5046, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 1.420466058763931, |
|
"grad_norm": 0.3879443485912782, |
|
"learning_rate": 3.599147775897822e-05, |
|
"loss": 0.5147, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 1.4245187436676798, |
|
"grad_norm": 0.4533658510888897, |
|
"learning_rate": 3.595732597966167e-05, |
|
"loss": 0.5221, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 1.4285714285714286, |
|
"grad_norm": 0.3514284045242658, |
|
"learning_rate": 3.592304568223542e-05, |
|
"loss": 0.5261, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 1.4326241134751774, |
|
"grad_norm": 0.42960114927603743, |
|
"learning_rate": 3.588863714278826e-05, |
|
"loss": 0.5239, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 1.436676798378926, |
|
"grad_norm": 0.4157234266114839, |
|
"learning_rate": 3.585410063844186e-05, |
|
"loss": 0.5136, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 1.4407294832826747, |
|
"grad_norm": 0.3868995120542772, |
|
"learning_rate": 3.581943644734846e-05, |
|
"loss": 0.5134, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 1.4447821681864235, |
|
"grad_norm": 0.3725545268594919, |
|
"learning_rate": 3.578464484868869e-05, |
|
"loss": 0.5028, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 1.4488348530901722, |
|
"grad_norm": 0.4222244466611466, |
|
"learning_rate": 3.5749726122669316e-05, |
|
"loss": 0.4973, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 1.452887537993921, |
|
"grad_norm": 0.3404146357414848, |
|
"learning_rate": 3.5714680550520943e-05, |
|
"loss": 0.5097, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 1.4569402228976698, |
|
"grad_norm": 0.48650870685084163, |
|
"learning_rate": 3.5679508414495794e-05, |
|
"loss": 0.5137, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 1.4609929078014185, |
|
"grad_norm": 0.47532553407670547, |
|
"learning_rate": 3.564420999786543e-05, |
|
"loss": 0.511, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 1.465045592705167, |
|
"grad_norm": 0.40615098973085634, |
|
"learning_rate": 3.560878558491842e-05, |
|
"loss": 0.4951, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 1.4690982776089159, |
|
"grad_norm": 0.3843917841242262, |
|
"learning_rate": 3.5573235460958145e-05, |
|
"loss": 0.5204, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 1.4731509625126646, |
|
"grad_norm": 0.4302570838829243, |
|
"learning_rate": 3.553755991230039e-05, |
|
"loss": 0.5055, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 1.4772036474164134, |
|
"grad_norm": 0.5110468740171087, |
|
"learning_rate": 3.5501759226271144e-05, |
|
"loss": 0.5188, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 1.4812563323201622, |
|
"grad_norm": 0.4404063573793456, |
|
"learning_rate": 3.546583369120419e-05, |
|
"loss": 0.5254, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 1.4853090172239107, |
|
"grad_norm": 0.49985110591406146, |
|
"learning_rate": 3.5429783596438864e-05, |
|
"loss": 0.5182, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 1.4893617021276595, |
|
"grad_norm": 0.39012242982584083, |
|
"learning_rate": 3.539360923231766e-05, |
|
"loss": 0.5062, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 1.4934143870314083, |
|
"grad_norm": 0.5959979678203442, |
|
"learning_rate": 3.535731089018394e-05, |
|
"loss": 0.5112, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 1.497467071935157, |
|
"grad_norm": 0.42843565077267814, |
|
"learning_rate": 3.532088886237956e-05, |
|
"loss": 0.5257, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 1.5015197568389058, |
|
"grad_norm": 0.550645301428324, |
|
"learning_rate": 3.528434344224253e-05, |
|
"loss": 0.5306, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 1.5055724417426544, |
|
"grad_norm": 0.5091182750265051, |
|
"learning_rate": 3.524767492410464e-05, |
|
"loss": 0.5125, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 1.5096251266464034, |
|
"grad_norm": 0.374279794609297, |
|
"learning_rate": 3.521088360328908e-05, |
|
"loss": 0.5122, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 1.513677811550152, |
|
"grad_norm": 0.5275170206560594, |
|
"learning_rate": 3.517396977610811e-05, |
|
"loss": 0.5007, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 1.5177304964539007, |
|
"grad_norm": 0.46321340058218863, |
|
"learning_rate": 3.5136933739860595e-05, |
|
"loss": 0.5172, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 1.5217831813576495, |
|
"grad_norm": 0.47124638484942266, |
|
"learning_rate": 3.509977579282971e-05, |
|
"loss": 0.5178, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 1.5258358662613982, |
|
"grad_norm": 0.4400460060669203, |
|
"learning_rate": 3.5062496234280424e-05, |
|
"loss": 0.5095, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 1.529888551165147, |
|
"grad_norm": 0.44787783607543513, |
|
"learning_rate": 3.502509536445719e-05, |
|
"loss": 0.4985, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 1.5339412360688955, |
|
"grad_norm": 0.45270159262455223, |
|
"learning_rate": 3.498757348458147e-05, |
|
"loss": 0.5065, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 1.5379939209726445, |
|
"grad_norm": 0.47544400455654506, |
|
"learning_rate": 3.4949930896849324e-05, |
|
"loss": 0.5083, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 1.542046605876393, |
|
"grad_norm": 0.4502265060682666, |
|
"learning_rate": 3.491216790442899e-05, |
|
"loss": 0.5189, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.5460992907801419, |
|
"grad_norm": 0.42386990203135555, |
|
"learning_rate": 3.487428481145839e-05, |
|
"loss": 0.5122, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 1.5501519756838906, |
|
"grad_norm": 0.3778941917367464, |
|
"learning_rate": 3.483628192304278e-05, |
|
"loss": 0.5087, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 1.5542046605876392, |
|
"grad_norm": 0.41910394900737297, |
|
"learning_rate": 3.479815954525219e-05, |
|
"loss": 0.5112, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 1.5582573454913882, |
|
"grad_norm": 0.35683601630209655, |
|
"learning_rate": 3.475991798511899e-05, |
|
"loss": 0.4946, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 1.5623100303951367, |
|
"grad_norm": 0.3754391276161083, |
|
"learning_rate": 3.4721557550635464e-05, |
|
"loss": 0.5099, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 1.5663627152988855, |
|
"grad_norm": 0.3630853363738009, |
|
"learning_rate": 3.468307855075128e-05, |
|
"loss": 0.5073, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 1.5704154002026343, |
|
"grad_norm": 0.39163727676025634, |
|
"learning_rate": 3.4644481295371005e-05, |
|
"loss": 0.4896, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 1.574468085106383, |
|
"grad_norm": 0.39962278173165977, |
|
"learning_rate": 3.460576609535163e-05, |
|
"loss": 0.5208, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 1.5785207700101318, |
|
"grad_norm": 0.3640182525208087, |
|
"learning_rate": 3.456693326250006e-05, |
|
"loss": 0.5193, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 1.5825734549138804, |
|
"grad_norm": 0.3871034964125718, |
|
"learning_rate": 3.452798310957058e-05, |
|
"loss": 0.5163, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.5866261398176293, |
|
"grad_norm": 0.4708321345242008, |
|
"learning_rate": 3.4488915950262386e-05, |
|
"loss": 0.5136, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 1.590678824721378, |
|
"grad_norm": 0.4262443509825757, |
|
"learning_rate": 3.4449732099216985e-05, |
|
"loss": 0.5019, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 1.5947315096251267, |
|
"grad_norm": 0.4960883493154585, |
|
"learning_rate": 3.441043187201574e-05, |
|
"loss": 0.4986, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 1.5987841945288754, |
|
"grad_norm": 0.46348185074641, |
|
"learning_rate": 3.437101558517728e-05, |
|
"loss": 0.515, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 1.602836879432624, |
|
"grad_norm": 0.45562489518813776, |
|
"learning_rate": 3.433148355615496e-05, |
|
"loss": 0.5181, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 1.606889564336373, |
|
"grad_norm": 0.517070227140482, |
|
"learning_rate": 3.4291836103334294e-05, |
|
"loss": 0.5144, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 1.6109422492401215, |
|
"grad_norm": 0.3736507878674384, |
|
"learning_rate": 3.425207354603043e-05, |
|
"loss": 0.5117, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 1.6149949341438703, |
|
"grad_norm": 0.48526320851915467, |
|
"learning_rate": 3.421219620448553e-05, |
|
"loss": 0.4929, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 1.619047619047619, |
|
"grad_norm": 0.5179302101710983, |
|
"learning_rate": 3.417220439986623e-05, |
|
"loss": 0.5075, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 1.6231003039513676, |
|
"grad_norm": 0.349021646962347, |
|
"learning_rate": 3.4132098454261024e-05, |
|
"loss": 0.5243, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.6271529888551166, |
|
"grad_norm": 0.44217883346559617, |
|
"learning_rate": 3.4091878690677676e-05, |
|
"loss": 0.5013, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 1.6312056737588652, |
|
"grad_norm": 0.4527982424641868, |
|
"learning_rate": 3.405154543304065e-05, |
|
"loss": 0.5321, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 1.635258358662614, |
|
"grad_norm": 0.4129390805088078, |
|
"learning_rate": 3.401109900618843e-05, |
|
"loss": 0.5207, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 1.6393110435663627, |
|
"grad_norm": 0.3980799190471002, |
|
"learning_rate": 3.3970539735870996e-05, |
|
"loss": 0.5049, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 1.6433637284701115, |
|
"grad_norm": 0.4908845598595389, |
|
"learning_rate": 3.392986794874714e-05, |
|
"loss": 0.5162, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.6474164133738602, |
|
"grad_norm": 0.43055821311817066, |
|
"learning_rate": 3.388908397238184e-05, |
|
"loss": 0.516, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 1.6514690982776088, |
|
"grad_norm": 0.37208979604796427, |
|
"learning_rate": 3.384818813524362e-05, |
|
"loss": 0.5052, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 1.6555217831813578, |
|
"grad_norm": 0.4802973786274369, |
|
"learning_rate": 3.380718076670195e-05, |
|
"loss": 0.5155, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 1.6595744680851063, |
|
"grad_norm": 0.41314622362844466, |
|
"learning_rate": 3.376606219702454e-05, |
|
"loss": 0.5154, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 1.663627152988855, |
|
"grad_norm": 0.3771455696682645, |
|
"learning_rate": 3.372483275737468e-05, |
|
"loss": 0.5211, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.6676798378926039, |
|
"grad_norm": 0.477617651507032, |
|
"learning_rate": 3.368349277980861e-05, |
|
"loss": 0.511, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 1.6717325227963524, |
|
"grad_norm": 0.45993886520684657, |
|
"learning_rate": 3.3642042597272844e-05, |
|
"loss": 0.5165, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 1.6757852077001014, |
|
"grad_norm": 0.376511524884432, |
|
"learning_rate": 3.360048254360144e-05, |
|
"loss": 0.5094, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 1.67983789260385, |
|
"grad_norm": 0.48774517907104364, |
|
"learning_rate": 3.355881295351336e-05, |
|
"loss": 0.5231, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 1.6838905775075987, |
|
"grad_norm": 0.5108002410323854, |
|
"learning_rate": 3.351703416260975e-05, |
|
"loss": 0.4992, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 1.6879432624113475, |
|
"grad_norm": 0.3770227765274486, |
|
"learning_rate": 3.347514650737126e-05, |
|
"loss": 0.507, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 1.6919959473150963, |
|
"grad_norm": 0.581922092754776, |
|
"learning_rate": 3.3433150325155295e-05, |
|
"loss": 0.4966, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 1.696048632218845, |
|
"grad_norm": 0.40256271476525934, |
|
"learning_rate": 3.339104595419334e-05, |
|
"loss": 0.5087, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 1.7001013171225936, |
|
"grad_norm": 0.40834122655581984, |
|
"learning_rate": 3.3348833733588204e-05, |
|
"loss": 0.5246, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 1.7041540020263426, |
|
"grad_norm": 0.47183102353395956, |
|
"learning_rate": 3.3306514003311305e-05, |
|
"loss": 0.5262, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.7082066869300911, |
|
"grad_norm": 0.3836566756031701, |
|
"learning_rate": 3.326408710419996e-05, |
|
"loss": 0.5106, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 1.71225937183384, |
|
"grad_norm": 0.4335688059504242, |
|
"learning_rate": 3.322155337795454e-05, |
|
"loss": 0.5092, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 1.7163120567375887, |
|
"grad_norm": 0.5614115497390675, |
|
"learning_rate": 3.317891316713587e-05, |
|
"loss": 0.5093, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 1.7203647416413372, |
|
"grad_norm": 0.4133951074959847, |
|
"learning_rate": 3.313616681516231e-05, |
|
"loss": 0.5011, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 1.7244174265450862, |
|
"grad_norm": 0.3637814659771627, |
|
"learning_rate": 3.309331466630713e-05, |
|
"loss": 0.5018, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 1.7284701114488348, |
|
"grad_norm": 0.46197070614050006, |
|
"learning_rate": 3.305035706569563e-05, |
|
"loss": 0.5135, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 1.7325227963525835, |
|
"grad_norm": 0.3976842330233229, |
|
"learning_rate": 3.3007294359302433e-05, |
|
"loss": 0.5204, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 1.7365754812563323, |
|
"grad_norm": 0.3778469305830394, |
|
"learning_rate": 3.296412689394864e-05, |
|
"loss": 0.5056, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 1.740628166160081, |
|
"grad_norm": 0.5189439001834191, |
|
"learning_rate": 3.292085501729909e-05, |
|
"loss": 0.505, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 1.7446808510638299, |
|
"grad_norm": 0.3599803567954272, |
|
"learning_rate": 3.2877479077859534e-05, |
|
"loss": 0.51, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.7487335359675784, |
|
"grad_norm": 0.5131805580404335, |
|
"learning_rate": 3.283399942497381e-05, |
|
"loss": 0.5228, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 1.7527862208713274, |
|
"grad_norm": 0.42081440290177197, |
|
"learning_rate": 3.279041640882108e-05, |
|
"loss": 0.5172, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 1.756838905775076, |
|
"grad_norm": 0.360106585548781, |
|
"learning_rate": 3.2746730380412964e-05, |
|
"loss": 0.5098, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 1.7608915906788247, |
|
"grad_norm": 0.4092394195612383, |
|
"learning_rate": 3.2702941691590726e-05, |
|
"loss": 0.5204, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 1.7649442755825735, |
|
"grad_norm": 0.42537013613511365, |
|
"learning_rate": 3.265905069502244e-05, |
|
"loss": 0.5088, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 1.768996960486322, |
|
"grad_norm": 0.4074066455580255, |
|
"learning_rate": 3.261505774420016e-05, |
|
"loss": 0.5049, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 1.773049645390071, |
|
"grad_norm": 0.43617451342870184, |
|
"learning_rate": 3.257096319343707e-05, |
|
"loss": 0.5085, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 1.7771023302938196, |
|
"grad_norm": 0.4001265046086003, |
|
"learning_rate": 3.2526767397864614e-05, |
|
"loss": 0.5138, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 1.7811550151975684, |
|
"grad_norm": 0.36978425541297655, |
|
"learning_rate": 3.248247071342966e-05, |
|
"loss": 0.5028, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 1.7852077001013171, |
|
"grad_norm": 0.44176015129427154, |
|
"learning_rate": 3.243807349689161e-05, |
|
"loss": 0.4952, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.789260385005066, |
|
"grad_norm": 0.30661975612240344, |
|
"learning_rate": 3.2393576105819544e-05, |
|
"loss": 0.5034, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 1.7933130699088147, |
|
"grad_norm": 0.4584117550853115, |
|
"learning_rate": 3.2348978898589333e-05, |
|
"loss": 0.5005, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 1.7973657548125632, |
|
"grad_norm": 0.35478248501504045, |
|
"learning_rate": 3.230428223438075e-05, |
|
"loss": 0.507, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 1.8014184397163122, |
|
"grad_norm": 0.41396256080427335, |
|
"learning_rate": 3.225948647317459e-05, |
|
"loss": 0.5125, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 1.8054711246200608, |
|
"grad_norm": 0.47181320797981524, |
|
"learning_rate": 3.2214591975749745e-05, |
|
"loss": 0.5101, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 1.8095238095238095, |
|
"grad_norm": 0.4122322160330429, |
|
"learning_rate": 3.216959910368034e-05, |
|
"loss": 0.4982, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 1.8135764944275583, |
|
"grad_norm": 0.5691128851101503, |
|
"learning_rate": 3.212450821933277e-05, |
|
"loss": 0.5004, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 1.8176291793313069, |
|
"grad_norm": 0.4564192474589319, |
|
"learning_rate": 3.207931968586281e-05, |
|
"loss": 0.5007, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 1.8216818642350558, |
|
"grad_norm": 0.5985012738884663, |
|
"learning_rate": 3.203403386721272e-05, |
|
"loss": 0.5056, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 1.8257345491388044, |
|
"grad_norm": 0.5291397435563098, |
|
"learning_rate": 3.1988651128108245e-05, |
|
"loss": 0.5112, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.8297872340425532, |
|
"grad_norm": 0.5046191799902497, |
|
"learning_rate": 3.194317183405573e-05, |
|
"loss": 0.5317, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 1.833839918946302, |
|
"grad_norm": 0.5715407202210351, |
|
"learning_rate": 3.189759635133914e-05, |
|
"loss": 0.5083, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 1.8378926038500507, |
|
"grad_norm": 0.4680357631599358, |
|
"learning_rate": 3.185192504701718e-05, |
|
"loss": 0.5033, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 1.8419452887537995, |
|
"grad_norm": 0.4448820935643253, |
|
"learning_rate": 3.1806158288920234e-05, |
|
"loss": 0.4861, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 1.845997973657548, |
|
"grad_norm": 0.45074109091554493, |
|
"learning_rate": 3.1760296445647477e-05, |
|
"loss": 0.505, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 1.850050658561297, |
|
"grad_norm": 0.4379497213059703, |
|
"learning_rate": 3.1714339886563896e-05, |
|
"loss": 0.4982, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 1.8541033434650456, |
|
"grad_norm": 0.4678026548600231, |
|
"learning_rate": 3.166828898179731e-05, |
|
"loss": 0.4903, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 1.8581560283687943, |
|
"grad_norm": 0.359024494586788, |
|
"learning_rate": 3.162214410223536e-05, |
|
"loss": 0.5138, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 1.8622087132725431, |
|
"grad_norm": 0.4542815410644416, |
|
"learning_rate": 3.157590561952257e-05, |
|
"loss": 0.5207, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 1.8662613981762917, |
|
"grad_norm": 0.3808222897699044, |
|
"learning_rate": 3.152957390605732e-05, |
|
"loss": 0.5077, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.8703140830800407, |
|
"grad_norm": 0.3966850415278545, |
|
"learning_rate": 3.148314933498886e-05, |
|
"loss": 0.5025, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 1.8743667679837892, |
|
"grad_norm": 0.4227418314994302, |
|
"learning_rate": 3.143663228021431e-05, |
|
"loss": 0.52, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 1.878419452887538, |
|
"grad_norm": 0.36865592472772585, |
|
"learning_rate": 3.1390023116375624e-05, |
|
"loss": 0.5094, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 1.8824721377912867, |
|
"grad_norm": 0.4230086571571748, |
|
"learning_rate": 3.134332221885661e-05, |
|
"loss": 0.5069, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 1.8865248226950353, |
|
"grad_norm": 0.4865376888736305, |
|
"learning_rate": 3.129652996377987e-05, |
|
"loss": 0.506, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 1.8905775075987843, |
|
"grad_norm": 0.3586565691382082, |
|
"learning_rate": 3.12496467280038e-05, |
|
"loss": 0.503, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 1.8946301925025328, |
|
"grad_norm": 0.5173100489627686, |
|
"learning_rate": 3.120267288911952e-05, |
|
"loss": 0.4963, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 1.8986828774062816, |
|
"grad_norm": 0.3894794386148582, |
|
"learning_rate": 3.11556088254479e-05, |
|
"loss": 0.5081, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 1.9027355623100304, |
|
"grad_norm": 0.434087189230862, |
|
"learning_rate": 3.11084549160364e-05, |
|
"loss": 0.4978, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 1.9067882472137792, |
|
"grad_norm": 0.43540656327153193, |
|
"learning_rate": 3.106121154065615e-05, |
|
"loss": 0.5135, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.910840932117528, |
|
"grad_norm": 0.38835530867502965, |
|
"learning_rate": 3.1013879079798805e-05, |
|
"loss": 0.5064, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 1.9148936170212765, |
|
"grad_norm": 0.43364286096721644, |
|
"learning_rate": 3.096645791467348e-05, |
|
"loss": 0.5017, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 1.9189463019250255, |
|
"grad_norm": 0.46412885633388007, |
|
"learning_rate": 3.091894842720373e-05, |
|
"loss": 0.5105, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 1.922998986828774, |
|
"grad_norm": 0.327685361404935, |
|
"learning_rate": 3.0871351000024425e-05, |
|
"loss": 0.4937, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 1.9270516717325228, |
|
"grad_norm": 0.4003732793389209, |
|
"learning_rate": 3.0823666016478716e-05, |
|
"loss": 0.5026, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 1.9311043566362716, |
|
"grad_norm": 0.31030464775365824, |
|
"learning_rate": 3.0775893860614896e-05, |
|
"loss": 0.487, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 1.93515704154002, |
|
"grad_norm": 0.37844837216061555, |
|
"learning_rate": 3.0728034917183336e-05, |
|
"loss": 0.4943, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 1.939209726443769, |
|
"grad_norm": 0.3761041097368198, |
|
"learning_rate": 3.06800895716334e-05, |
|
"loss": 0.515, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 1.9432624113475176, |
|
"grad_norm": 0.34105499333301525, |
|
"learning_rate": 3.063205821011029e-05, |
|
"loss": 0.5022, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 1.9473150962512664, |
|
"grad_norm": 0.3434204163494274, |
|
"learning_rate": 3.0583941219452016e-05, |
|
"loss": 0.5059, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.9513677811550152, |
|
"grad_norm": 0.342798220622963, |
|
"learning_rate": 3.053573898718618e-05, |
|
"loss": 0.5161, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 1.955420466058764, |
|
"grad_norm": 0.38622806206243165, |
|
"learning_rate": 3.0487451901526956e-05, |
|
"loss": 0.5219, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 1.9594731509625127, |
|
"grad_norm": 0.4246240951222412, |
|
"learning_rate": 3.0439080351371875e-05, |
|
"loss": 0.5032, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 1.9635258358662613, |
|
"grad_norm": 0.3556257540680832, |
|
"learning_rate": 3.0390624726298764e-05, |
|
"loss": 0.4864, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 1.9675785207700103, |
|
"grad_norm": 0.36868876075038537, |
|
"learning_rate": 3.034208541656255e-05, |
|
"loss": 0.5208, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 1.9716312056737588, |
|
"grad_norm": 0.3610588748401692, |
|
"learning_rate": 3.029346281309218e-05, |
|
"loss": 0.5041, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 1.9756838905775076, |
|
"grad_norm": 0.34937827685673983, |
|
"learning_rate": 3.0244757307487415e-05, |
|
"loss": 0.4913, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 1.9797365754812564, |
|
"grad_norm": 0.3477030210550178, |
|
"learning_rate": 3.019596929201569e-05, |
|
"loss": 0.5064, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 1.983789260385005, |
|
"grad_norm": 0.4227474125135131, |
|
"learning_rate": 3.0147099159608985e-05, |
|
"loss": 0.522, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 1.987841945288754, |
|
"grad_norm": 0.3094207511546184, |
|
"learning_rate": 3.0098147303860616e-05, |
|
"loss": 0.505, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.9918946301925025, |
|
"grad_norm": 0.4008816235367959, |
|
"learning_rate": 3.0049114119022117e-05, |
|
"loss": 0.4977, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 1.9959473150962512, |
|
"grad_norm": 0.3553228837321045, |
|
"learning_rate": 3.0000000000000004e-05, |
|
"loss": 0.503, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 2.0020263424518743, |
|
"grad_norm": 0.3957562871432883, |
|
"learning_rate": 2.995080534235264e-05, |
|
"loss": 0.47, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 2.0060790273556233, |
|
"grad_norm": 0.43386265612843367, |
|
"learning_rate": 2.9901530542287044e-05, |
|
"loss": 0.4379, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 2.010131712259372, |
|
"grad_norm": 0.6085637293603964, |
|
"learning_rate": 2.9852175996655676e-05, |
|
"loss": 0.4398, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 2.0141843971631204, |
|
"grad_norm": 0.4898735295242637, |
|
"learning_rate": 2.980274210295326e-05, |
|
"loss": 0.4434, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 2.0182370820668694, |
|
"grad_norm": 0.5061533783363372, |
|
"learning_rate": 2.9753229259313578e-05, |
|
"loss": 0.4414, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 2.022289766970618, |
|
"grad_norm": 0.4269512340349817, |
|
"learning_rate": 2.9703637864506274e-05, |
|
"loss": 0.4308, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 2.026342451874367, |
|
"grad_norm": 0.41223315654197756, |
|
"learning_rate": 2.965396831793362e-05, |
|
"loss": 0.4355, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 2.0303951367781155, |
|
"grad_norm": 0.4540618269403147, |
|
"learning_rate": 2.9604221019627316e-05, |
|
"loss": 0.4387, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 2.0344478216818644, |
|
"grad_norm": 0.43871473472012845, |
|
"learning_rate": 2.955439637024526e-05, |
|
"loss": 0.4248, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 2.038500506585613, |
|
"grad_norm": 0.4505008021598321, |
|
"learning_rate": 2.9504494771068334e-05, |
|
"loss": 0.4446, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 2.0425531914893615, |
|
"grad_norm": 0.4172192649300292, |
|
"learning_rate": 2.9454516623997156e-05, |
|
"loss": 0.4302, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 2.0466058763931105, |
|
"grad_norm": 0.40146772547336085, |
|
"learning_rate": 2.9404462331548847e-05, |
|
"loss": 0.4439, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 2.050658561296859, |
|
"grad_norm": 0.39888178150287157, |
|
"learning_rate": 2.93543322968538e-05, |
|
"loss": 0.4343, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 2.054711246200608, |
|
"grad_norm": 0.4010324290539672, |
|
"learning_rate": 2.9304126923652428e-05, |
|
"loss": 0.4338, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 2.0587639311043566, |
|
"grad_norm": 0.3506868515718636, |
|
"learning_rate": 2.9253846616291896e-05, |
|
"loss": 0.436, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 2.062816616008105, |
|
"grad_norm": 0.4129525003336646, |
|
"learning_rate": 2.9203491779722896e-05, |
|
"loss": 0.4218, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 2.066869300911854, |
|
"grad_norm": 0.353920518082642, |
|
"learning_rate": 2.9153062819496357e-05, |
|
"loss": 0.438, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 2.0709219858156027, |
|
"grad_norm": 0.41031765540668913, |
|
"learning_rate": 2.9102560141760178e-05, |
|
"loss": 0.4474, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 2.0749746707193517, |
|
"grad_norm": 0.3560449349411888, |
|
"learning_rate": 2.9051984153256004e-05, |
|
"loss": 0.4474, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 2.0790273556231003, |
|
"grad_norm": 0.3737734389564037, |
|
"learning_rate": 2.900133526131588e-05, |
|
"loss": 0.4438, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 2.0830800405268493, |
|
"grad_norm": 0.3905759365363995, |
|
"learning_rate": 2.8950613873859025e-05, |
|
"loss": 0.4455, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 2.087132725430598, |
|
"grad_norm": 0.3586068139248692, |
|
"learning_rate": 2.8899820399388515e-05, |
|
"loss": 0.4246, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 2.0911854103343464, |
|
"grad_norm": 0.3939403685352425, |
|
"learning_rate": 2.8848955246988012e-05, |
|
"loss": 0.4467, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 2.0952380952380953, |
|
"grad_norm": 0.38268006746299166, |
|
"learning_rate": 2.879801882631847e-05, |
|
"loss": 0.4326, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 2.099290780141844, |
|
"grad_norm": 0.3976079893542882, |
|
"learning_rate": 2.8747011547614808e-05, |
|
"loss": 0.4519, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 2.103343465045593, |
|
"grad_norm": 0.4129467361960651, |
|
"learning_rate": 2.8695933821682635e-05, |
|
"loss": 0.4466, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 2.1073961499493414, |
|
"grad_norm": 0.4394261124473519, |
|
"learning_rate": 2.864478605989494e-05, |
|
"loss": 0.44, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 2.11144883485309, |
|
"grad_norm": 0.47510373003059053, |
|
"learning_rate": 2.8593568674188765e-05, |
|
"loss": 0.4388, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 2.115501519756839, |
|
"grad_norm": 0.422149260139249, |
|
"learning_rate": 2.8542282077061892e-05, |
|
"loss": 0.4411, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 2.1195542046605875, |
|
"grad_norm": 0.4490399135688369, |
|
"learning_rate": 2.8490926681569523e-05, |
|
"loss": 0.4471, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 2.1236068895643365, |
|
"grad_norm": 0.48237993127512185, |
|
"learning_rate": 2.8439502901320956e-05, |
|
"loss": 0.4446, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 2.127659574468085, |
|
"grad_norm": 0.3726192184767776, |
|
"learning_rate": 2.8388011150476237e-05, |
|
"loss": 0.4444, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 2.1317122593718336, |
|
"grad_norm": 0.44225977855558796, |
|
"learning_rate": 2.8336451843742866e-05, |
|
"loss": 0.4516, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 2.1357649442755826, |
|
"grad_norm": 0.3318267644956898, |
|
"learning_rate": 2.8284825396372387e-05, |
|
"loss": 0.448, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 2.139817629179331, |
|
"grad_norm": 0.4167815557160867, |
|
"learning_rate": 2.8233132224157132e-05, |
|
"loss": 0.4404, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 2.14387031408308, |
|
"grad_norm": 0.399117046215451, |
|
"learning_rate": 2.8181372743426805e-05, |
|
"loss": 0.4412, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 2.1479229989868287, |
|
"grad_norm": 0.42996619069159747, |
|
"learning_rate": 2.8129547371045128e-05, |
|
"loss": 0.4457, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 2.1519756838905777, |
|
"grad_norm": 0.40658830316335864, |
|
"learning_rate": 2.8077656524406534e-05, |
|
"loss": 0.4475, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 2.1560283687943262, |
|
"grad_norm": 0.3900252079960181, |
|
"learning_rate": 2.802570062143278e-05, |
|
"loss": 0.446, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 2.160081053698075, |
|
"grad_norm": 0.3457040130531706, |
|
"learning_rate": 2.7973680080569555e-05, |
|
"loss": 0.4524, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 2.164133738601824, |
|
"grad_norm": 0.31523937089503834, |
|
"learning_rate": 2.792159532078314e-05, |
|
"loss": 0.436, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 2.1681864235055723, |
|
"grad_norm": 0.349969402368419, |
|
"learning_rate": 2.7869446761557033e-05, |
|
"loss": 0.439, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 2.1722391084093213, |
|
"grad_norm": 0.35386101101734324, |
|
"learning_rate": 2.781723482288857e-05, |
|
"loss": 0.4391, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 2.17629179331307, |
|
"grad_norm": 0.355684435443336, |
|
"learning_rate": 2.7764959925285517e-05, |
|
"loss": 0.4276, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 2.180344478216819, |
|
"grad_norm": 0.3370386697794104, |
|
"learning_rate": 2.771262248976272e-05, |
|
"loss": 0.4387, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 2.1843971631205674, |
|
"grad_norm": 0.3227019948931399, |
|
"learning_rate": 2.7660222937838677e-05, |
|
"loss": 0.4414, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 2.188449848024316, |
|
"grad_norm": 0.37100772248145486, |
|
"learning_rate": 2.7607761691532186e-05, |
|
"loss": 0.464, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 2.192502532928065, |
|
"grad_norm": 0.34920108223892243, |
|
"learning_rate": 2.7555239173358916e-05, |
|
"loss": 0.445, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 2.1965552178318135, |
|
"grad_norm": 0.33438942817637946, |
|
"learning_rate": 2.7502655806328e-05, |
|
"loss": 0.4346, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 2.2006079027355625, |
|
"grad_norm": 0.338233034355839, |
|
"learning_rate": 2.7450012013938648e-05, |
|
"loss": 0.4546, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 2.204660587639311, |
|
"grad_norm": 0.3149971370650695, |
|
"learning_rate": 2.739730822017673e-05, |
|
"loss": 0.4513, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 2.2087132725430596, |
|
"grad_norm": 0.30565439280200163, |
|
"learning_rate": 2.7344544849511355e-05, |
|
"loss": 0.4537, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 2.2127659574468086, |
|
"grad_norm": 0.36768349326667193, |
|
"learning_rate": 2.7291722326891456e-05, |
|
"loss": 0.4545, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 2.216818642350557, |
|
"grad_norm": 0.3701403437624965, |
|
"learning_rate": 2.723884107774236e-05, |
|
"loss": 0.4406, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 2.220871327254306, |
|
"grad_norm": 0.36215148140741227, |
|
"learning_rate": 2.718590152796239e-05, |
|
"loss": 0.4533, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 2.2249240121580547, |
|
"grad_norm": 0.4130935454727938, |
|
"learning_rate": 2.71329041039194e-05, |
|
"loss": 0.4377, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 2.2289766970618032, |
|
"grad_norm": 0.44096410498843186, |
|
"learning_rate": 2.7079849232447357e-05, |
|
"loss": 0.4503, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 2.2330293819655522, |
|
"grad_norm": 0.29901955869727204, |
|
"learning_rate": 2.7026737340842895e-05, |
|
"loss": 0.4478, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 2.237082066869301, |
|
"grad_norm": 0.43729885851108696, |
|
"learning_rate": 2.697356885686189e-05, |
|
"loss": 0.4472, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 2.2411347517730498, |
|
"grad_norm": 0.3935974532733391, |
|
"learning_rate": 2.6920344208716014e-05, |
|
"loss": 0.4488, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 2.2451874366767983, |
|
"grad_norm": 0.3613505801042168, |
|
"learning_rate": 2.6867063825069252e-05, |
|
"loss": 0.4411, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 2.2492401215805473, |
|
"grad_norm": 0.35253110813727523, |
|
"learning_rate": 2.6813728135034494e-05, |
|
"loss": 0.4471, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 2.253292806484296, |
|
"grad_norm": 0.3726277670303869, |
|
"learning_rate": 2.6760337568170056e-05, |
|
"loss": 0.4376, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 2.2573454913880444, |
|
"grad_norm": 0.3075152692363883, |
|
"learning_rate": 2.6706892554476226e-05, |
|
"loss": 0.4418, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 2.2613981762917934, |
|
"grad_norm": 0.46525698386528336, |
|
"learning_rate": 2.6653393524391795e-05, |
|
"loss": 0.4431, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 2.265450861195542, |
|
"grad_norm": 0.30736152987971066, |
|
"learning_rate": 2.6599840908790592e-05, |
|
"loss": 0.449, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 2.269503546099291, |
|
"grad_norm": 0.44616953922452857, |
|
"learning_rate": 2.6546235138978028e-05, |
|
"loss": 0.4438, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 2.2735562310030395, |
|
"grad_norm": 0.31993291555988845, |
|
"learning_rate": 2.6492576646687597e-05, |
|
"loss": 0.4369, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 2.2776089159067885, |
|
"grad_norm": 0.43328981149113904, |
|
"learning_rate": 2.6438865864077425e-05, |
|
"loss": 0.4358, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 2.281661600810537, |
|
"grad_norm": 0.3703336479610273, |
|
"learning_rate": 2.6385103223726766e-05, |
|
"loss": 0.4587, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 2.2857142857142856, |
|
"grad_norm": 0.34035332176800803, |
|
"learning_rate": 2.6331289158632537e-05, |
|
"loss": 0.4543, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 2.2897669706180346, |
|
"grad_norm": 0.7337075215353476, |
|
"learning_rate": 2.6277424102205817e-05, |
|
"loss": 0.463, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 2.293819655521783, |
|
"grad_norm": 0.3725300488193857, |
|
"learning_rate": 2.6223508488268374e-05, |
|
"loss": 0.4289, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 2.297872340425532, |
|
"grad_norm": 0.3976758875332692, |
|
"learning_rate": 2.6169542751049148e-05, |
|
"loss": 0.4628, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 2.3019250253292807, |
|
"grad_norm": 0.5249985049781397, |
|
"learning_rate": 2.6115527325180754e-05, |
|
"loss": 0.4446, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 2.305977710233029, |
|
"grad_norm": 0.38854462570778775, |
|
"learning_rate": 2.606146264569603e-05, |
|
"loss": 0.4585, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 2.310030395136778, |
|
"grad_norm": 0.3637403502615504, |
|
"learning_rate": 2.6007349148024447e-05, |
|
"loss": 0.4336, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 2.3140830800405268, |
|
"grad_norm": 0.2952197897272316, |
|
"learning_rate": 2.5953187267988694e-05, |
|
"loss": 0.4409, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 2.3181357649442758, |
|
"grad_norm": 0.30950569598802063, |
|
"learning_rate": 2.5898977441801097e-05, |
|
"loss": 0.449, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 2.3221884498480243, |
|
"grad_norm": 0.280585872126244, |
|
"learning_rate": 2.584472010606015e-05, |
|
"loss": 0.4604, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 2.326241134751773, |
|
"grad_norm": 0.2882796447003011, |
|
"learning_rate": 2.5790415697746976e-05, |
|
"loss": 0.451, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 2.330293819655522, |
|
"grad_norm": 0.31414600750350985, |
|
"learning_rate": 2.5736064654221808e-05, |
|
"loss": 0.4374, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 2.3343465045592704, |
|
"grad_norm": 0.32454409259304257, |
|
"learning_rate": 2.568166741322048e-05, |
|
"loss": 0.4492, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 2.3383991894630194, |
|
"grad_norm": 0.37787661911337705, |
|
"learning_rate": 2.56272244128509e-05, |
|
"loss": 0.4149, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 2.342451874366768, |
|
"grad_norm": 0.29554876401561386, |
|
"learning_rate": 2.55727360915895e-05, |
|
"loss": 0.44, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 2.3465045592705165, |
|
"grad_norm": 0.3968092765570855, |
|
"learning_rate": 2.5518202888277734e-05, |
|
"loss": 0.4449, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 2.3505572441742655, |
|
"grad_norm": 0.32434456495073655, |
|
"learning_rate": 2.5463625242118523e-05, |
|
"loss": 0.4326, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 2.354609929078014, |
|
"grad_norm": 0.3659404647028591, |
|
"learning_rate": 2.5409003592672723e-05, |
|
"loss": 0.4337, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 2.358662613981763, |
|
"grad_norm": 0.32923180796301754, |
|
"learning_rate": 2.535433837985559e-05, |
|
"loss": 0.4541, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 2.3627152988855116, |
|
"grad_norm": 0.35806497270649884, |
|
"learning_rate": 2.529963004393324e-05, |
|
"loss": 0.4662, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 2.3667679837892606, |
|
"grad_norm": 0.33209025811679616, |
|
"learning_rate": 2.524487902551908e-05, |
|
"loss": 0.4544, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 2.370820668693009, |
|
"grad_norm": 0.3132716577974625, |
|
"learning_rate": 2.519008576557029e-05, |
|
"loss": 0.441, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 2.3748733535967577, |
|
"grad_norm": 0.3189992924541878, |
|
"learning_rate": 2.5135250705384254e-05, |
|
"loss": 0.444, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 2.3789260385005067, |
|
"grad_norm": 0.3010755766283583, |
|
"learning_rate": 2.5080374286595007e-05, |
|
"loss": 0.4431, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 2.382978723404255, |
|
"grad_norm": 0.29915708172104893, |
|
"learning_rate": 2.5025456951169677e-05, |
|
"loss": 0.4556, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 2.387031408308004, |
|
"grad_norm": 0.31512823751757624, |
|
"learning_rate": 2.4970499141404942e-05, |
|
"loss": 0.4452, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 2.3910840932117527, |
|
"grad_norm": 0.3008110070225229, |
|
"learning_rate": 2.491550129992345e-05, |
|
"loss": 0.4551, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 2.3951367781155017, |
|
"grad_norm": 0.30066378537557736, |
|
"learning_rate": 2.486046386967024e-05, |
|
"loss": 0.4527, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 2.3991894630192503, |
|
"grad_norm": 0.31900805950846545, |
|
"learning_rate": 2.4805387293909214e-05, |
|
"loss": 0.4453, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 2.403242147922999, |
|
"grad_norm": 0.32272666830661023, |
|
"learning_rate": 2.4750272016219552e-05, |
|
"loss": 0.4463, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 2.407294832826748, |
|
"grad_norm": 0.29742438696928303, |
|
"learning_rate": 2.4695118480492114e-05, |
|
"loss": 0.4394, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 2.4113475177304964, |
|
"grad_norm": 0.34476301038890383, |
|
"learning_rate": 2.4639927130925898e-05, |
|
"loss": 0.4355, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 2.4154002026342454, |
|
"grad_norm": 0.2871740574842531, |
|
"learning_rate": 2.458469841202444e-05, |
|
"loss": 0.4494, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 2.419452887537994, |
|
"grad_norm": 0.3655792431410782, |
|
"learning_rate": 2.452943276859226e-05, |
|
"loss": 0.4321, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 2.4235055724417425, |
|
"grad_norm": 0.3207807851871291, |
|
"learning_rate": 2.447413064573125e-05, |
|
"loss": 0.4602, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 2.4275582573454915, |
|
"grad_norm": 0.3694406663567429, |
|
"learning_rate": 2.4418792488837095e-05, |
|
"loss": 0.4499, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 2.43161094224924, |
|
"grad_norm": 0.36992645886717623, |
|
"learning_rate": 2.4363418743595713e-05, |
|
"loss": 0.4364, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 2.435663627152989, |
|
"grad_norm": 0.33592862389406797, |
|
"learning_rate": 2.430800985597963e-05, |
|
"loss": 0.4466, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 2.4397163120567376, |
|
"grad_norm": 0.3647960976857997, |
|
"learning_rate": 2.4252566272244415e-05, |
|
"loss": 0.444, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 2.443768996960486, |
|
"grad_norm": 0.2979463577093382, |
|
"learning_rate": 2.4197088438925063e-05, |
|
"loss": 0.4498, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 2.447821681864235, |
|
"grad_norm": 1.4707392002836646, |
|
"learning_rate": 2.4141576802832417e-05, |
|
"loss": 0.5017, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 2.4518743667679836, |
|
"grad_norm": 0.36738779001057814, |
|
"learning_rate": 2.408603181104957e-05, |
|
"loss": 0.4488, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 2.4559270516717326, |
|
"grad_norm": 0.35488189599425357, |
|
"learning_rate": 2.4030453910928245e-05, |
|
"loss": 0.4467, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 2.459979736575481, |
|
"grad_norm": 0.34225882506672584, |
|
"learning_rate": 2.397484355008521e-05, |
|
"loss": 0.4391, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 2.46403242147923, |
|
"grad_norm": 0.3708517603180246, |
|
"learning_rate": 2.3919201176398662e-05, |
|
"loss": 0.4524, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 2.4680851063829787, |
|
"grad_norm": 0.3573057332459475, |
|
"learning_rate": 2.3863527238004633e-05, |
|
"loss": 0.4487, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 2.4721377912867273, |
|
"grad_norm": 0.39226646172926816, |
|
"learning_rate": 2.380782218329337e-05, |
|
"loss": 0.4475, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 2.4761904761904763, |
|
"grad_norm": 0.4056394488334609, |
|
"learning_rate": 2.3752086460905725e-05, |
|
"loss": 0.4367, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 2.480243161094225, |
|
"grad_norm": 0.3824361039707399, |
|
"learning_rate": 2.3696320519729544e-05, |
|
"loss": 0.4487, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 2.484295845997974, |
|
"grad_norm": 0.43321690947275676, |
|
"learning_rate": 2.3640524808896045e-05, |
|
"loss": 0.4316, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 2.4883485309017224, |
|
"grad_norm": 0.32906114150191906, |
|
"learning_rate": 2.3584699777776222e-05, |
|
"loss": 0.4311, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 2.4924012158054714, |
|
"grad_norm": 0.4135064805641933, |
|
"learning_rate": 2.3528845875977195e-05, |
|
"loss": 0.4367, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 2.49645390070922, |
|
"grad_norm": 0.3075867414052995, |
|
"learning_rate": 2.3472963553338614e-05, |
|
"loss": 0.4509, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 2.5005065856129685, |
|
"grad_norm": 0.36943563398524304, |
|
"learning_rate": 2.341705325992901e-05, |
|
"loss": 0.4467, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 2.5045592705167175, |
|
"grad_norm": 0.32497838346417, |
|
"learning_rate": 2.336111544604222e-05, |
|
"loss": 0.4531, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 2.508611955420466, |
|
"grad_norm": 0.34607269225396814, |
|
"learning_rate": 2.33051505621937e-05, |
|
"loss": 0.4518, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 2.512664640324215, |
|
"grad_norm": 0.3532800136132749, |
|
"learning_rate": 2.324915905911693e-05, |
|
"loss": 0.4499, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 2.5167173252279635, |
|
"grad_norm": 0.3469895859362161, |
|
"learning_rate": 2.319314138775977e-05, |
|
"loss": 0.4469, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 2.520770010131712, |
|
"grad_norm": 0.35303597346463045, |
|
"learning_rate": 2.3137097999280856e-05, |
|
"loss": 0.4631, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 2.524822695035461, |
|
"grad_norm": 0.33306931131371176, |
|
"learning_rate": 2.308102934504593e-05, |
|
"loss": 0.4483, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 2.5288753799392096, |
|
"grad_norm": 0.39904517279416585, |
|
"learning_rate": 2.3024935876624222e-05, |
|
"loss": 0.4576, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 2.5329280648429586, |
|
"grad_norm": 0.3153181827968658, |
|
"learning_rate": 2.2968818045784813e-05, |
|
"loss": 0.4367, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 2.536980749746707, |
|
"grad_norm": 0.3781010605339802, |
|
"learning_rate": 2.2912676304493006e-05, |
|
"loss": 0.4457, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 2.5410334346504557, |
|
"grad_norm": 0.29391441835821075, |
|
"learning_rate": 2.2856511104906668e-05, |
|
"loss": 0.452, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 2.5450861195542047, |
|
"grad_norm": 0.36242613095922027, |
|
"learning_rate": 2.2800322899372586e-05, |
|
"loss": 0.442, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 2.5491388044579533, |
|
"grad_norm": 0.2911216540986173, |
|
"learning_rate": 2.2744112140422844e-05, |
|
"loss": 0.4544, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 2.5531914893617023, |
|
"grad_norm": 0.33314197778100546, |
|
"learning_rate": 2.2687879280771177e-05, |
|
"loss": 0.4429, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 2.557244174265451, |
|
"grad_norm": 0.3373905077786125, |
|
"learning_rate": 2.26316247733093e-05, |
|
"loss": 0.4407, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 2.5612968591691994, |
|
"grad_norm": 0.29360894161075535, |
|
"learning_rate": 2.257534907110328e-05, |
|
"loss": 0.452, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 2.5653495440729484, |
|
"grad_norm": 0.3435933567924771, |
|
"learning_rate": 2.2519052627389882e-05, |
|
"loss": 0.4229, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 2.569402228976697, |
|
"grad_norm": 0.31099989928450195, |
|
"learning_rate": 2.246273589557294e-05, |
|
"loss": 0.4467, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 2.573454913880446, |
|
"grad_norm": 0.33590756174056147, |
|
"learning_rate": 2.240639932921966e-05, |
|
"loss": 0.4491, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 2.5775075987841944, |
|
"grad_norm": 0.3174088406342149, |
|
"learning_rate": 2.2350043382056995e-05, |
|
"loss": 0.434, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 2.581560283687943, |
|
"grad_norm": 0.3396256810400546, |
|
"learning_rate": 2.2293668507968015e-05, |
|
"loss": 0.4386, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 2.585612968591692, |
|
"grad_norm": 0.34941651772778226, |
|
"learning_rate": 2.2237275160988186e-05, |
|
"loss": 0.4453, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 2.589665653495441, |
|
"grad_norm": 0.3139772349811781, |
|
"learning_rate": 2.2180863795301787e-05, |
|
"loss": 0.4537, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 2.5937183383991895, |
|
"grad_norm": 0.33967533783999143, |
|
"learning_rate": 2.212443486523819e-05, |
|
"loss": 0.4251, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 2.597771023302938, |
|
"grad_norm": 0.2934155719201838, |
|
"learning_rate": 2.2067988825268243e-05, |
|
"loss": 0.4341, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 2.601823708206687, |
|
"grad_norm": 0.3261770300733525, |
|
"learning_rate": 2.2011526130000596e-05, |
|
"loss": 0.4258, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 2.6058763931104356, |
|
"grad_norm": 0.32268330268693696, |
|
"learning_rate": 2.1955047234178038e-05, |
|
"loss": 0.461, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 2.6099290780141846, |
|
"grad_norm": 0.33757751078098575, |
|
"learning_rate": 2.1898552592673825e-05, |
|
"loss": 0.4452, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 2.613981762917933, |
|
"grad_norm": 0.2876189952836773, |
|
"learning_rate": 2.184204266048803e-05, |
|
"loss": 0.4466, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 2.6180344478216817, |
|
"grad_norm": 0.3073735223765011, |
|
"learning_rate": 2.1785517892743887e-05, |
|
"loss": 0.4615, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 2.6220871327254307, |
|
"grad_norm": 0.3214758388394934, |
|
"learning_rate": 2.17289787446841e-05, |
|
"loss": 0.444, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 2.6261398176291793, |
|
"grad_norm": 0.27524340088033705, |
|
"learning_rate": 2.1672425671667198e-05, |
|
"loss": 0.4447, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 2.6301925025329282, |
|
"grad_norm": 0.29290924979770494, |
|
"learning_rate": 2.161585912916385e-05, |
|
"loss": 0.4572, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 2.634245187436677, |
|
"grad_norm": 0.33879773637830063, |
|
"learning_rate": 2.1559279572753214e-05, |
|
"loss": 0.4385, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 2.6382978723404253, |
|
"grad_norm": 0.3068252899768846, |
|
"learning_rate": 2.1502687458119268e-05, |
|
"loss": 0.4497, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 2.6423505572441743, |
|
"grad_norm": 0.35991325970743543, |
|
"learning_rate": 2.1446083241047116e-05, |
|
"loss": 0.4592, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 2.646403242147923, |
|
"grad_norm": 0.3058493977206401, |
|
"learning_rate": 2.1389467377419333e-05, |
|
"loss": 0.4611, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 2.650455927051672, |
|
"grad_norm": 0.32082623202886773, |
|
"learning_rate": 2.133284032321232e-05, |
|
"loss": 0.4478, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 2.6545086119554204, |
|
"grad_norm": 0.31452109716194143, |
|
"learning_rate": 2.1276202534492566e-05, |
|
"loss": 0.4377, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 2.658561296859169, |
|
"grad_norm": 0.3094283373019122, |
|
"learning_rate": 2.121955446741306e-05, |
|
"loss": 0.4488, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 2.662613981762918, |
|
"grad_norm": 0.30272795675876885, |
|
"learning_rate": 2.1162896578209517e-05, |
|
"loss": 0.4382, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 2.6666666666666665, |
|
"grad_norm": 0.3273500290740797, |
|
"learning_rate": 2.1106229323196813e-05, |
|
"loss": 0.4391, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 2.6707193515704155, |
|
"grad_norm": 0.31595633356171093, |
|
"learning_rate": 2.1049553158765214e-05, |
|
"loss": 0.4444, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 2.674772036474164, |
|
"grad_norm": 0.3439033188967827, |
|
"learning_rate": 2.0992868541376764e-05, |
|
"loss": 0.4346, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 2.6788247213779126, |
|
"grad_norm": 0.30719713157150863, |
|
"learning_rate": 2.093617592756158e-05, |
|
"loss": 0.4463, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 2.6828774062816616, |
|
"grad_norm": 0.35128633744959337, |
|
"learning_rate": 2.0879475773914167e-05, |
|
"loss": 0.4491, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 2.6869300911854106, |
|
"grad_norm": 0.30330176869223924, |
|
"learning_rate": 2.082276853708978e-05, |
|
"loss": 0.4349, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 2.690982776089159, |
|
"grad_norm": 0.3495879458067684, |
|
"learning_rate": 2.076605467380071e-05, |
|
"loss": 0.4423, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 2.6950354609929077, |
|
"grad_norm": 0.3512911574405841, |
|
"learning_rate": 2.0709334640812613e-05, |
|
"loss": 0.464, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 2.6990881458966567, |
|
"grad_norm": 0.34062616869990525, |
|
"learning_rate": 2.0652608894940824e-05, |
|
"loss": 0.4541, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 2.7031408308004052, |
|
"grad_norm": 0.37044441134231904, |
|
"learning_rate": 2.0595877893046722e-05, |
|
"loss": 0.436, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 2.7071935157041542, |
|
"grad_norm": 0.35347530839149116, |
|
"learning_rate": 2.0539142092033985e-05, |
|
"loss": 0.4403, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 2.711246200607903, |
|
"grad_norm": 0.33036433030140894, |
|
"learning_rate": 2.048240194884496e-05, |
|
"loss": 0.4525, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 2.7152988855116513, |
|
"grad_norm": 0.3328539221896754, |
|
"learning_rate": 2.042565792045695e-05, |
|
"loss": 0.4503, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 2.7193515704154003, |
|
"grad_norm": 0.3164094912656424, |
|
"learning_rate": 2.036891046387857e-05, |
|
"loss": 0.4342, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 2.723404255319149, |
|
"grad_norm": 0.37646322469939697, |
|
"learning_rate": 2.0312160036146036e-05, |
|
"loss": 0.4407, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 2.727456940222898, |
|
"grad_norm": 0.3513642160258712, |
|
"learning_rate": 2.025540709431948e-05, |
|
"loss": 0.4348, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 2.7315096251266464, |
|
"grad_norm": 0.36501632566818276, |
|
"learning_rate": 2.0198652095479298e-05, |
|
"loss": 0.4465, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 2.735562310030395, |
|
"grad_norm": 0.3416589340750122, |
|
"learning_rate": 2.014189549672245e-05, |
|
"loss": 0.4482, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 2.739614994934144, |
|
"grad_norm": 0.3007728333520319, |
|
"learning_rate": 2.0085137755158776e-05, |
|
"loss": 0.4565, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 2.7436676798378925, |
|
"grad_norm": 0.30824599172832745, |
|
"learning_rate": 2.0028379327907327e-05, |
|
"loss": 0.4447, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 2.7477203647416415, |
|
"grad_norm": 0.3451431554573569, |
|
"learning_rate": 1.9971620672092676e-05, |
|
"loss": 0.4434, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 2.75177304964539, |
|
"grad_norm": 0.33097863538793304, |
|
"learning_rate": 1.991486224484123e-05, |
|
"loss": 0.4476, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 2.7558257345491386, |
|
"grad_norm": 0.29961467654858276, |
|
"learning_rate": 1.985810450327756e-05, |
|
"loss": 0.4478, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 2.7598784194528876, |
|
"grad_norm": 0.3403203420809795, |
|
"learning_rate": 1.9801347904520706e-05, |
|
"loss": 0.4557, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 2.763931104356636, |
|
"grad_norm": 0.3156827715850812, |
|
"learning_rate": 1.974459290568053e-05, |
|
"loss": 0.4569, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 2.767983789260385, |
|
"grad_norm": 0.29769558939758445, |
|
"learning_rate": 1.968783996385397e-05, |
|
"loss": 0.4409, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 2.7720364741641337, |
|
"grad_norm": 0.29791217609480064, |
|
"learning_rate": 1.963108953612143e-05, |
|
"loss": 0.4539, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 2.7760891590678822, |
|
"grad_norm": 0.3229550052508969, |
|
"learning_rate": 1.9574342079543056e-05, |
|
"loss": 0.4562, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 2.780141843971631, |
|
"grad_norm": 0.28863112206548475, |
|
"learning_rate": 1.9517598051155046e-05, |
|
"loss": 0.4273, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 2.78419452887538, |
|
"grad_norm": 0.29913085611783935, |
|
"learning_rate": 1.9460857907966025e-05, |
|
"loss": 0.4468, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 2.7882472137791288, |
|
"grad_norm": 0.30504870769965775, |
|
"learning_rate": 1.9404122106953285e-05, |
|
"loss": 0.4366, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 2.7922998986828773, |
|
"grad_norm": 0.3183548687307993, |
|
"learning_rate": 1.9347391105059176e-05, |
|
"loss": 0.4522, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 2.7963525835866263, |
|
"grad_norm": 0.3019815071253538, |
|
"learning_rate": 1.92906653591874e-05, |
|
"loss": 0.4474, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 2.800405268490375, |
|
"grad_norm": 0.30336358210702535, |
|
"learning_rate": 1.9233945326199295e-05, |
|
"loss": 0.4522, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 2.804457953394124, |
|
"grad_norm": 0.30822194354763655, |
|
"learning_rate": 1.917723146291022e-05, |
|
"loss": 0.4348, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 2.8085106382978724, |
|
"grad_norm": 0.3303242014782378, |
|
"learning_rate": 1.912052422608584e-05, |
|
"loss": 0.4307, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 2.812563323201621, |
|
"grad_norm": 0.29794367899266655, |
|
"learning_rate": 1.9063824072438428e-05, |
|
"loss": 0.4341, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 2.81661600810537, |
|
"grad_norm": 0.3174664037592215, |
|
"learning_rate": 1.9007131458623246e-05, |
|
"loss": 0.4393, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 2.8206686930091185, |
|
"grad_norm": 0.30017042749364053, |
|
"learning_rate": 1.895044684123479e-05, |
|
"loss": 0.4427, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 2.8247213779128675, |
|
"grad_norm": 0.3288068825208793, |
|
"learning_rate": 1.8893770676803194e-05, |
|
"loss": 0.4549, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 2.828774062816616, |
|
"grad_norm": 0.2759460699432527, |
|
"learning_rate": 1.8837103421790486e-05, |
|
"loss": 0.4463, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 2.8328267477203646, |
|
"grad_norm": 0.3079493824829927, |
|
"learning_rate": 1.8780445532586952e-05, |
|
"loss": 0.4336, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 2.8368794326241136, |
|
"grad_norm": 0.2988597070602149, |
|
"learning_rate": 1.872379746550743e-05, |
|
"loss": 0.4521, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 2.840932117527862, |
|
"grad_norm": 0.2990122406735027, |
|
"learning_rate": 1.866715967678769e-05, |
|
"loss": 0.4495, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 2.844984802431611, |
|
"grad_norm": 0.3206623590956083, |
|
"learning_rate": 1.861053262258067e-05, |
|
"loss": 0.4541, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 2.8490374873353597, |
|
"grad_norm": 0.3017499744305013, |
|
"learning_rate": 1.8553916758952897e-05, |
|
"loss": 0.4299, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 2.853090172239108, |
|
"grad_norm": 0.34254757571502004, |
|
"learning_rate": 1.8497312541880735e-05, |
|
"loss": 0.4282, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 2.857142857142857, |
|
"grad_norm": 0.3095873526921389, |
|
"learning_rate": 1.8440720427246786e-05, |
|
"loss": 0.4473, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 2.8611955420466058, |
|
"grad_norm": 0.3295921806370062, |
|
"learning_rate": 1.8384140870836157e-05, |
|
"loss": 0.4427, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 2.8652482269503547, |
|
"grad_norm": 0.3366039141201994, |
|
"learning_rate": 1.8327574328332806e-05, |
|
"loss": 0.4477, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 2.8693009118541033, |
|
"grad_norm": 0.32572680530478754, |
|
"learning_rate": 1.8271021255315906e-05, |
|
"loss": 0.4455, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 2.873353596757852, |
|
"grad_norm": 0.339333313576772, |
|
"learning_rate": 1.8214482107256117e-05, |
|
"loss": 0.4467, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 2.877406281661601, |
|
"grad_norm": 0.3395683983420118, |
|
"learning_rate": 1.8157957339511968e-05, |
|
"loss": 0.4385, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 2.8814589665653494, |
|
"grad_norm": 0.3036800446427919, |
|
"learning_rate": 1.8101447407326182e-05, |
|
"loss": 0.4377, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 2.8855116514690984, |
|
"grad_norm": 0.30107971129121613, |
|
"learning_rate": 1.8044952765821966e-05, |
|
"loss": 0.4422, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 2.889564336372847, |
|
"grad_norm": 0.3148485900685422, |
|
"learning_rate": 1.7988473869999407e-05, |
|
"loss": 0.4439, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 2.8936170212765955, |
|
"grad_norm": 0.2950791393623461, |
|
"learning_rate": 1.7932011174731764e-05, |
|
"loss": 0.442, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 2.8976697061803445, |
|
"grad_norm": 0.28569760056665355, |
|
"learning_rate": 1.7875565134761817e-05, |
|
"loss": 0.4617, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 2.9017223910840935, |
|
"grad_norm": 0.3015151902480177, |
|
"learning_rate": 1.7819136204698226e-05, |
|
"loss": 0.4417, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 2.905775075987842, |
|
"grad_norm": 0.29907971014512114, |
|
"learning_rate": 1.776272483901182e-05, |
|
"loss": 0.4414, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 2.9098277608915906, |
|
"grad_norm": 0.29303278600781346, |
|
"learning_rate": 1.7706331492031995e-05, |
|
"loss": 0.4458, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 2.9138804457953396, |
|
"grad_norm": 0.3352112405041721, |
|
"learning_rate": 1.764995661794301e-05, |
|
"loss": 0.4448, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 2.917933130699088, |
|
"grad_norm": 0.29192361821003954, |
|
"learning_rate": 1.759360067078035e-05, |
|
"loss": 0.4402, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 2.921985815602837, |
|
"grad_norm": 0.3295767993291945, |
|
"learning_rate": 1.7537264104427064e-05, |
|
"loss": 0.4411, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 2.9260385005065856, |
|
"grad_norm": 0.27014651494193037, |
|
"learning_rate": 1.748094737261012e-05, |
|
"loss": 0.4511, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 2.930091185410334, |
|
"grad_norm": 0.29532333503549385, |
|
"learning_rate": 1.7424650928896726e-05, |
|
"loss": 0.4392, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 2.934143870314083, |
|
"grad_norm": 0.29101374290441184, |
|
"learning_rate": 1.7368375226690712e-05, |
|
"loss": 0.4373, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 2.9381965552178317, |
|
"grad_norm": 0.2812497261151677, |
|
"learning_rate": 1.731212071922883e-05, |
|
"loss": 0.446, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 2.9422492401215807, |
|
"grad_norm": 0.34742649925238944, |
|
"learning_rate": 1.7255887859577156e-05, |
|
"loss": 0.4411, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 2.9463019250253293, |
|
"grad_norm": 0.2660535524593765, |
|
"learning_rate": 1.7199677100627427e-05, |
|
"loss": 0.4602, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 2.950354609929078, |
|
"grad_norm": 0.3596071028043233, |
|
"learning_rate": 1.7143488895093343e-05, |
|
"loss": 0.4479, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 2.954407294832827, |
|
"grad_norm": 0.2771537138202783, |
|
"learning_rate": 1.7087323695506994e-05, |
|
"loss": 0.4574, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 2.9584599797365754, |
|
"grad_norm": 0.3106087990541143, |
|
"learning_rate": 1.7031181954215194e-05, |
|
"loss": 0.4539, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 2.9625126646403244, |
|
"grad_norm": 0.31238413735800574, |
|
"learning_rate": 1.6975064123375788e-05, |
|
"loss": 0.444, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 2.966565349544073, |
|
"grad_norm": 0.26556593177038745, |
|
"learning_rate": 1.6918970654954084e-05, |
|
"loss": 0.434, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 2.9706180344478215, |
|
"grad_norm": 0.29900316896919654, |
|
"learning_rate": 1.686290200071915e-05, |
|
"loss": 0.4497, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 2.9746707193515705, |
|
"grad_norm": 0.2914092049199421, |
|
"learning_rate": 1.6806858612240234e-05, |
|
"loss": 0.436, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 2.978723404255319, |
|
"grad_norm": 0.2665007332908628, |
|
"learning_rate": 1.6750840940883078e-05, |
|
"loss": 0.4468, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 2.982776089159068, |
|
"grad_norm": 0.2578489529933866, |
|
"learning_rate": 1.6694849437806305e-05, |
|
"loss": 0.4583, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 2.9868287740628165, |
|
"grad_norm": 0.2949833436060018, |
|
"learning_rate": 1.663888455395778e-05, |
|
"loss": 0.4309, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 2.990881458966565, |
|
"grad_norm": 0.29864411839978644, |
|
"learning_rate": 1.6582946740070995e-05, |
|
"loss": 0.4378, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 2.994934143870314, |
|
"grad_norm": 0.30509975133692285, |
|
"learning_rate": 1.6527036446661396e-05, |
|
"loss": 0.4507, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 3.0010131712259374, |
|
"grad_norm": 0.2904486926820399, |
|
"learning_rate": 1.6471154124022818e-05, |
|
"loss": 0.4434, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 3.005065856129686, |
|
"grad_norm": 0.424593221842747, |
|
"learning_rate": 1.6415300222223788e-05, |
|
"loss": 0.3968, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 3.0091185410334345, |
|
"grad_norm": 0.3516938590175839, |
|
"learning_rate": 1.6359475191103958e-05, |
|
"loss": 0.3781, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 3.0131712259371835, |
|
"grad_norm": 0.6150739562559858, |
|
"learning_rate": 1.6303679480270466e-05, |
|
"loss": 0.3683, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 3.017223910840932, |
|
"grad_norm": 0.4200938480292203, |
|
"learning_rate": 1.624791353909428e-05, |
|
"loss": 0.3862, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 3.021276595744681, |
|
"grad_norm": 0.4530342634633421, |
|
"learning_rate": 1.619217781670663e-05, |
|
"loss": 0.4055, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 3.0253292806484295, |
|
"grad_norm": 0.4177319884095146, |
|
"learning_rate": 1.6136472761995373e-05, |
|
"loss": 0.3678, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 3.0293819655521785, |
|
"grad_norm": 0.4379591988524175, |
|
"learning_rate": 1.608079882360134e-05, |
|
"loss": 0.368, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 3.033434650455927, |
|
"grad_norm": 0.44141483682509264, |
|
"learning_rate": 1.60251564499148e-05, |
|
"loss": 0.3793, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 3.0374873353596756, |
|
"grad_norm": 0.44547407014593793, |
|
"learning_rate": 1.596954608907176e-05, |
|
"loss": 0.3757, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 3.0415400202634246, |
|
"grad_norm": 0.34663492303163934, |
|
"learning_rate": 1.591396818895043e-05, |
|
"loss": 0.377, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 3.045592705167173, |
|
"grad_norm": 0.4130705246369375, |
|
"learning_rate": 1.585842319716759e-05, |
|
"loss": 0.3754, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 3.049645390070922, |
|
"grad_norm": 0.3542988434115936, |
|
"learning_rate": 1.5802911561074944e-05, |
|
"loss": 0.3772, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 3.0536980749746707, |
|
"grad_norm": 0.3840931272267641, |
|
"learning_rate": 1.5747433727755595e-05, |
|
"loss": 0.3782, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 3.0577507598784193, |
|
"grad_norm": 0.3640354547040424, |
|
"learning_rate": 1.5691990144020376e-05, |
|
"loss": 0.3765, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 3.0618034447821683, |
|
"grad_norm": 0.31522179710902176, |
|
"learning_rate": 1.5636581256404297e-05, |
|
"loss": 0.3665, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 3.065856129685917, |
|
"grad_norm": 0.3707685956522802, |
|
"learning_rate": 1.558120751116291e-05, |
|
"loss": 0.3742, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 3.069908814589666, |
|
"grad_norm": 0.3217415553060016, |
|
"learning_rate": 1.552586935426876e-05, |
|
"loss": 0.3841, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 3.0739614994934144, |
|
"grad_norm": 0.3258575564881046, |
|
"learning_rate": 1.547056723140774e-05, |
|
"loss": 0.3825, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 3.078014184397163, |
|
"grad_norm": 0.3478935300480006, |
|
"learning_rate": 1.5415301587975565e-05, |
|
"loss": 0.3673, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 3.082066869300912, |
|
"grad_norm": 0.30221546863403326, |
|
"learning_rate": 1.536007286907411e-05, |
|
"loss": 0.376, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 3.0861195542046604, |
|
"grad_norm": 0.3780320676399285, |
|
"learning_rate": 1.5304881519507896e-05, |
|
"loss": 0.3925, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 3.0901722391084094, |
|
"grad_norm": 0.2804951652470931, |
|
"learning_rate": 1.5249727983780453e-05, |
|
"loss": 0.3765, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 3.094224924012158, |
|
"grad_norm": 0.345845021631185, |
|
"learning_rate": 1.5194612706090786e-05, |
|
"loss": 0.3787, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 3.098277608915907, |
|
"grad_norm": 0.3049223900579112, |
|
"learning_rate": 1.5139536130329771e-05, |
|
"loss": 0.37, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 3.1023302938196555, |
|
"grad_norm": 0.29705894965763635, |
|
"learning_rate": 1.508449870007656e-05, |
|
"loss": 0.387, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 3.106382978723404, |
|
"grad_norm": 0.30195231262870714, |
|
"learning_rate": 1.5029500858595056e-05, |
|
"loss": 0.3799, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 3.110435663627153, |
|
"grad_norm": 0.30220918166568445, |
|
"learning_rate": 1.4974543048830328e-05, |
|
"loss": 0.3702, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 3.1144883485309016, |
|
"grad_norm": 0.3268285877005793, |
|
"learning_rate": 1.4919625713405e-05, |
|
"loss": 0.3589, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 3.1185410334346506, |
|
"grad_norm": 0.3002920237627172, |
|
"learning_rate": 1.4864749294615756e-05, |
|
"loss": 0.3962, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 3.122593718338399, |
|
"grad_norm": 0.3107827545607655, |
|
"learning_rate": 1.4809914234429716e-05, |
|
"loss": 0.3799, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 3.126646403242148, |
|
"grad_norm": 0.3055912508616054, |
|
"learning_rate": 1.4755120974480923e-05, |
|
"loss": 0.3793, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 3.1306990881458967, |
|
"grad_norm": 0.3152214387272536, |
|
"learning_rate": 1.4700369956066771e-05, |
|
"loss": 0.3773, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 3.1347517730496453, |
|
"grad_norm": 0.3073789019687832, |
|
"learning_rate": 1.4645661620144413e-05, |
|
"loss": 0.3867, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 3.1388044579533942, |
|
"grad_norm": 0.31435177793268887, |
|
"learning_rate": 1.4590996407327284e-05, |
|
"loss": 0.3736, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 3.142857142857143, |
|
"grad_norm": 0.29461137758717176, |
|
"learning_rate": 1.4536374757881487e-05, |
|
"loss": 0.3877, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 3.146909827760892, |
|
"grad_norm": 0.28198099001639415, |
|
"learning_rate": 1.4481797111722271e-05, |
|
"loss": 0.4069, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 3.1509625126646403, |
|
"grad_norm": 0.293567171721376, |
|
"learning_rate": 1.4427263908410507e-05, |
|
"loss": 0.3772, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 3.155015197568389, |
|
"grad_norm": 0.2986529484683073, |
|
"learning_rate": 1.4372775587149108e-05, |
|
"loss": 0.3918, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 3.159067882472138, |
|
"grad_norm": 0.2818015213233193, |
|
"learning_rate": 1.4318332586779522e-05, |
|
"loss": 0.3964, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 3.1631205673758864, |
|
"grad_norm": 0.3074096888569812, |
|
"learning_rate": 1.4263935345778202e-05, |
|
"loss": 0.4049, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 3.1671732522796354, |
|
"grad_norm": 0.25953109238577915, |
|
"learning_rate": 1.420958430225303e-05, |
|
"loss": 0.3794, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 3.171225937183384, |
|
"grad_norm": 0.31294619245811045, |
|
"learning_rate": 1.415527989393985e-05, |
|
"loss": 0.3548, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 3.1752786220871325, |
|
"grad_norm": 0.27614100810599057, |
|
"learning_rate": 1.410102255819891e-05, |
|
"loss": 0.3736, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 3.1793313069908815, |
|
"grad_norm": 0.28708403168863017, |
|
"learning_rate": 1.404681273201131e-05, |
|
"loss": 0.3757, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 3.18338399189463, |
|
"grad_norm": 0.3075666016417243, |
|
"learning_rate": 1.399265085197556e-05, |
|
"loss": 0.3912, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 3.187436676798379, |
|
"grad_norm": 0.2955733847675601, |
|
"learning_rate": 1.393853735430398e-05, |
|
"loss": 0.3641, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 3.1914893617021276, |
|
"grad_norm": 0.29322215407182345, |
|
"learning_rate": 1.3884472674819246e-05, |
|
"loss": 0.3766, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 3.1955420466058766, |
|
"grad_norm": 0.2849763688385754, |
|
"learning_rate": 1.3830457248950864e-05, |
|
"loss": 0.3843, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 3.199594731509625, |
|
"grad_norm": 0.2974370891380051, |
|
"learning_rate": 1.377649151173163e-05, |
|
"loss": 0.3871, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 3.2036474164133737, |
|
"grad_norm": 0.29433329535090247, |
|
"learning_rate": 1.3722575897794181e-05, |
|
"loss": 0.3956, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 3.2077001013171227, |
|
"grad_norm": 0.2907393464166778, |
|
"learning_rate": 1.3668710841367472e-05, |
|
"loss": 0.3816, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 3.2117527862208712, |
|
"grad_norm": 0.30027682817655954, |
|
"learning_rate": 1.361489677627324e-05, |
|
"loss": 0.3756, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 3.2158054711246202, |
|
"grad_norm": 0.2847557925785226, |
|
"learning_rate": 1.3561134135922585e-05, |
|
"loss": 0.3791, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 3.219858156028369, |
|
"grad_norm": 0.28216159575831395, |
|
"learning_rate": 1.350742335331241e-05, |
|
"loss": 0.386, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 3.2239108409321173, |
|
"grad_norm": 0.2710375038479823, |
|
"learning_rate": 1.345376486102198e-05, |
|
"loss": 0.3735, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 3.2279635258358663, |
|
"grad_norm": 0.28059097426665736, |
|
"learning_rate": 1.3400159091209414e-05, |
|
"loss": 0.3768, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 3.232016210739615, |
|
"grad_norm": 0.2723536413524358, |
|
"learning_rate": 1.3346606475608216e-05, |
|
"loss": 0.3842, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 3.236068895643364, |
|
"grad_norm": 0.2853816952220937, |
|
"learning_rate": 1.3293107445523781e-05, |
|
"loss": 0.3787, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 3.2401215805471124, |
|
"grad_norm": 0.2959084383207551, |
|
"learning_rate": 1.3239662431829949e-05, |
|
"loss": 0.3918, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 3.2441742654508614, |
|
"grad_norm": 0.2766603098804154, |
|
"learning_rate": 1.3186271864965509e-05, |
|
"loss": 0.3624, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 3.24822695035461, |
|
"grad_norm": 0.3026042150857366, |
|
"learning_rate": 1.3132936174930756e-05, |
|
"loss": 0.3621, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 3.2522796352583585, |
|
"grad_norm": 0.2742561487743748, |
|
"learning_rate": 1.3079655791283995e-05, |
|
"loss": 0.3893, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 3.2563323201621075, |
|
"grad_norm": 0.2760375628958661, |
|
"learning_rate": 1.3026431143138108e-05, |
|
"loss": 0.3922, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 3.260385005065856, |
|
"grad_norm": 0.28813637507981776, |
|
"learning_rate": 1.2973262659157114e-05, |
|
"loss": 0.4042, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 3.264437689969605, |
|
"grad_norm": 0.27877541883218393, |
|
"learning_rate": 1.2920150767552651e-05, |
|
"loss": 0.3561, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 3.2684903748733536, |
|
"grad_norm": 0.2731620905857762, |
|
"learning_rate": 1.2867095896080607e-05, |
|
"loss": 0.3785, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 3.272543059777102, |
|
"grad_norm": 0.2961690620105766, |
|
"learning_rate": 1.2814098472037612e-05, |
|
"loss": 0.3767, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 3.276595744680851, |
|
"grad_norm": 0.29509104047589546, |
|
"learning_rate": 1.276115892225764e-05, |
|
"loss": 0.3667, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 3.2806484295845997, |
|
"grad_norm": 0.27336537556697477, |
|
"learning_rate": 1.2708277673108555e-05, |
|
"loss": 0.387, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 3.2847011144883487, |
|
"grad_norm": 0.30747567351227856, |
|
"learning_rate": 1.2655455150488649e-05, |
|
"loss": 0.3674, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 3.288753799392097, |
|
"grad_norm": 0.29696171125542814, |
|
"learning_rate": 1.2602691779823272e-05, |
|
"loss": 0.3996, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 3.2928064842958458, |
|
"grad_norm": 0.2778289306346103, |
|
"learning_rate": 1.2549987986061355e-05, |
|
"loss": 0.3777, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 3.2968591691995948, |
|
"grad_norm": 0.2871112908730644, |
|
"learning_rate": 1.2497344193672005e-05, |
|
"loss": 0.3735, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 3.3009118541033433, |
|
"grad_norm": 0.2703737925153029, |
|
"learning_rate": 1.2444760826641092e-05, |
|
"loss": 0.4039, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 3.3049645390070923, |
|
"grad_norm": 0.29268238257235124, |
|
"learning_rate": 1.2392238308467817e-05, |
|
"loss": 0.3783, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 3.309017223910841, |
|
"grad_norm": 0.25914824607343756, |
|
"learning_rate": 1.2339777062161326e-05, |
|
"loss": 0.3925, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 3.31306990881459, |
|
"grad_norm": 0.279073011138421, |
|
"learning_rate": 1.2287377510237293e-05, |
|
"loss": 0.3873, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 3.3171225937183384, |
|
"grad_norm": 0.28560425005596807, |
|
"learning_rate": 1.2235040074714488e-05, |
|
"loss": 0.3985, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 3.321175278622087, |
|
"grad_norm": 0.2434895695464389, |
|
"learning_rate": 1.2182765177111434e-05, |
|
"loss": 0.3672, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 3.325227963525836, |
|
"grad_norm": 0.3157947536896033, |
|
"learning_rate": 1.213055323844297e-05, |
|
"loss": 0.3688, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 3.3292806484295845, |
|
"grad_norm": 0.25622059523143437, |
|
"learning_rate": 1.2078404679216864e-05, |
|
"loss": 0.3856, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 3.3333333333333335, |
|
"grad_norm": 0.2549363724525438, |
|
"learning_rate": 1.2026319919430458e-05, |
|
"loss": 0.3818, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 3.337386018237082, |
|
"grad_norm": 0.27402175080321245, |
|
"learning_rate": 1.1974299378567227e-05, |
|
"loss": 0.377, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 3.341438703140831, |
|
"grad_norm": 0.2606916214581092, |
|
"learning_rate": 1.1922343475593462e-05, |
|
"loss": 0.3859, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 3.3454913880445796, |
|
"grad_norm": 0.26546023109038186, |
|
"learning_rate": 1.187045262895488e-05, |
|
"loss": 0.3857, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 3.349544072948328, |
|
"grad_norm": 0.28538081996767695, |
|
"learning_rate": 1.1818627256573203e-05, |
|
"loss": 0.3805, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 3.353596757852077, |
|
"grad_norm": 0.2625457689722491, |
|
"learning_rate": 1.1766867775842864e-05, |
|
"loss": 0.3957, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 3.3576494427558257, |
|
"grad_norm": 0.27508901977596273, |
|
"learning_rate": 1.1715174603627615e-05, |
|
"loss": 0.3839, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 3.3617021276595747, |
|
"grad_norm": 0.2986378797415174, |
|
"learning_rate": 1.1663548156257147e-05, |
|
"loss": 0.3789, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 3.365754812563323, |
|
"grad_norm": 0.23867793076966923, |
|
"learning_rate": 1.161198884952377e-05, |
|
"loss": 0.3996, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 3.3698074974670718, |
|
"grad_norm": 0.26597500572465627, |
|
"learning_rate": 1.1560497098679056e-05, |
|
"loss": 0.3785, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 3.3738601823708207, |
|
"grad_norm": 0.27059348209968437, |
|
"learning_rate": 1.1509073318430479e-05, |
|
"loss": 0.3949, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 3.3779128672745693, |
|
"grad_norm": 0.26741435023113813, |
|
"learning_rate": 1.1457717922938116e-05, |
|
"loss": 0.387, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 3.3819655521783183, |
|
"grad_norm": 0.28979849854333145, |
|
"learning_rate": 1.1406431325811233e-05, |
|
"loss": 0.4014, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 3.386018237082067, |
|
"grad_norm": 0.2804573125543681, |
|
"learning_rate": 1.135521394010506e-05, |
|
"loss": 0.369, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 3.3900709219858154, |
|
"grad_norm": 0.25260685176343123, |
|
"learning_rate": 1.1304066178317367e-05, |
|
"loss": 0.378, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 3.3941236068895644, |
|
"grad_norm": 0.29897722846356234, |
|
"learning_rate": 1.1252988452385199e-05, |
|
"loss": 0.3719, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 3.398176291793313, |
|
"grad_norm": 0.2657487645919887, |
|
"learning_rate": 1.1201981173681536e-05, |
|
"loss": 0.3698, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 3.402228976697062, |
|
"grad_norm": 0.26029149813444563, |
|
"learning_rate": 1.1151044753011991e-05, |
|
"loss": 0.3904, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 3.4062816616008105, |
|
"grad_norm": 0.2734679281200348, |
|
"learning_rate": 1.1100179600611491e-05, |
|
"loss": 0.3848, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 3.410334346504559, |
|
"grad_norm": 0.2716137131398029, |
|
"learning_rate": 1.1049386126140985e-05, |
|
"loss": 0.3741, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 3.414387031408308, |
|
"grad_norm": 0.2634184139064893, |
|
"learning_rate": 1.0998664738684128e-05, |
|
"loss": 0.3765, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 3.4184397163120566, |
|
"grad_norm": 0.27904435880000683, |
|
"learning_rate": 1.0948015846744e-05, |
|
"loss": 0.3849, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 3.4224924012158056, |
|
"grad_norm": 0.25797630616007766, |
|
"learning_rate": 1.0897439858239832e-05, |
|
"loss": 0.3801, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 3.426545086119554, |
|
"grad_norm": 0.2844606074488202, |
|
"learning_rate": 1.0846937180503652e-05, |
|
"loss": 0.37, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 3.430597771023303, |
|
"grad_norm": 0.26661373746076855, |
|
"learning_rate": 1.0796508220277117e-05, |
|
"loss": 0.399, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 3.4346504559270516, |
|
"grad_norm": 0.26920641761077396, |
|
"learning_rate": 1.0746153383708107e-05, |
|
"loss": 0.383, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 3.4387031408308006, |
|
"grad_norm": 0.2542666129754366, |
|
"learning_rate": 1.0695873076347579e-05, |
|
"loss": 0.3935, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 3.442755825734549, |
|
"grad_norm": 0.25541270782132497, |
|
"learning_rate": 1.0645667703146205e-05, |
|
"loss": 0.3712, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 3.4468085106382977, |
|
"grad_norm": 0.2914645649576862, |
|
"learning_rate": 1.0595537668451161e-05, |
|
"loss": 0.3908, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 3.4508611955420467, |
|
"grad_norm": 0.2596787124867935, |
|
"learning_rate": 1.0545483376002854e-05, |
|
"loss": 0.378, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 3.4549138804457953, |
|
"grad_norm": 0.28695781466548614, |
|
"learning_rate": 1.0495505228931676e-05, |
|
"loss": 0.373, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 3.4589665653495443, |
|
"grad_norm": 0.27698444545983414, |
|
"learning_rate": 1.044560362975474e-05, |
|
"loss": 0.3769, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 3.463019250253293, |
|
"grad_norm": 0.25443344921029964, |
|
"learning_rate": 1.0395778980372695e-05, |
|
"loss": 0.366, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 3.4670719351570414, |
|
"grad_norm": 0.2701842417622334, |
|
"learning_rate": 1.0346031682066381e-05, |
|
"loss": 0.3878, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 3.4711246200607904, |
|
"grad_norm": 0.2636816194464245, |
|
"learning_rate": 1.0296362135493724e-05, |
|
"loss": 0.3912, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 3.475177304964539, |
|
"grad_norm": 0.27153472589611827, |
|
"learning_rate": 1.0246770740686422e-05, |
|
"loss": 0.3831, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 3.479229989868288, |
|
"grad_norm": 0.27422607972872015, |
|
"learning_rate": 1.0197257897046743e-05, |
|
"loss": 0.3817, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 3.4832826747720365, |
|
"grad_norm": 0.26624394577954064, |
|
"learning_rate": 1.014782400334433e-05, |
|
"loss": 0.3951, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 3.487335359675785, |
|
"grad_norm": 0.2632487391095868, |
|
"learning_rate": 1.009846945771296e-05, |
|
"loss": 0.3748, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 3.491388044579534, |
|
"grad_norm": 0.2633740274445684, |
|
"learning_rate": 1.0049194657647363e-05, |
|
"loss": 0.3924, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 3.4954407294832825, |
|
"grad_norm": 0.25876546737194156, |
|
"learning_rate": 1.0000000000000006e-05, |
|
"loss": 0.3857, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 3.4994934143870315, |
|
"grad_norm": 0.2678369579513087, |
|
"learning_rate": 9.950885880977891e-06, |
|
"loss": 0.3701, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 3.50354609929078, |
|
"grad_norm": 0.2730054374452717, |
|
"learning_rate": 9.901852696139382e-06, |
|
"loss": 0.3713, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 3.5075987841945286, |
|
"grad_norm": 0.26020743387710277, |
|
"learning_rate": 9.852900840391027e-06, |
|
"loss": 0.3934, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 3.5116514690982776, |
|
"grad_norm": 0.2958818361826814, |
|
"learning_rate": 9.804030707984313e-06, |
|
"loss": 0.3774, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 3.515704154002026, |
|
"grad_norm": 0.2805318368422625, |
|
"learning_rate": 9.755242692512599e-06, |
|
"loss": 0.3669, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 3.519756838905775, |
|
"grad_norm": 0.2480031469657075, |
|
"learning_rate": 9.70653718690782e-06, |
|
"loss": 0.3898, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 3.5238095238095237, |
|
"grad_norm": 0.28233674872525927, |
|
"learning_rate": 9.657914583437454e-06, |
|
"loss": 0.3837, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 3.5278622087132723, |
|
"grad_norm": 0.2670526331881823, |
|
"learning_rate": 9.609375273701246e-06, |
|
"loss": 0.3735, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 3.5319148936170213, |
|
"grad_norm": 0.24365645103486785, |
|
"learning_rate": 9.560919648628133e-06, |
|
"loss": 0.3988, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 3.5359675785207703, |
|
"grad_norm": 0.27796258992061873, |
|
"learning_rate": 9.512548098473047e-06, |
|
"loss": 0.3958, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 3.540020263424519, |
|
"grad_norm": 0.7051140925998627, |
|
"learning_rate": 9.464261012813825e-06, |
|
"loss": 0.4069, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 3.5440729483282674, |
|
"grad_norm": 0.2746925444918877, |
|
"learning_rate": 9.416058780547987e-06, |
|
"loss": 0.3714, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 3.5481256332320164, |
|
"grad_norm": 0.25279237447573455, |
|
"learning_rate": 9.367941789889714e-06, |
|
"loss": 0.3909, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 3.552178318135765, |
|
"grad_norm": 0.24768085262923958, |
|
"learning_rate": 9.319910428366607e-06, |
|
"loss": 0.3755, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 3.556231003039514, |
|
"grad_norm": 0.2747154223771578, |
|
"learning_rate": 9.271965082816667e-06, |
|
"loss": 0.3875, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 3.5602836879432624, |
|
"grad_norm": 0.28027387227942985, |
|
"learning_rate": 9.224106139385111e-06, |
|
"loss": 0.3775, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 3.564336372847011, |
|
"grad_norm": 0.24608580364638544, |
|
"learning_rate": 9.176333983521291e-06, |
|
"loss": 0.3836, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 3.56838905775076, |
|
"grad_norm": 0.26904768871843926, |
|
"learning_rate": 9.12864899997558e-06, |
|
"loss": 0.3985, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 3.5724417426545085, |
|
"grad_norm": 0.27128606048802917, |
|
"learning_rate": 9.08105157279628e-06, |
|
"loss": 0.3891, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 3.5764944275582575, |
|
"grad_norm": 0.2464368499810563, |
|
"learning_rate": 9.03354208532653e-06, |
|
"loss": 0.3944, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 3.580547112462006, |
|
"grad_norm": 0.26687798754210473, |
|
"learning_rate": 8.986120920201205e-06, |
|
"loss": 0.3631, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 3.5845997973657546, |
|
"grad_norm": 0.2868137283380602, |
|
"learning_rate": 8.938788459343852e-06, |
|
"loss": 0.3779, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 3.5886524822695036, |
|
"grad_norm": 0.2642451697752491, |
|
"learning_rate": 8.8915450839636e-06, |
|
"loss": 0.3791, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 3.592705167173252, |
|
"grad_norm": 0.27812817513330057, |
|
"learning_rate": 8.844391174552116e-06, |
|
"loss": 0.4046, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 3.596757852077001, |
|
"grad_norm": 0.2893191344552168, |
|
"learning_rate": 8.797327110880479e-06, |
|
"loss": 0.3684, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 3.6008105369807497, |
|
"grad_norm": 0.2641902188587831, |
|
"learning_rate": 8.750353271996206e-06, |
|
"loss": 0.3761, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 3.6048632218844983, |
|
"grad_norm": 0.27965262140526126, |
|
"learning_rate": 8.703470036220132e-06, |
|
"loss": 0.3831, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 3.6089159067882473, |
|
"grad_norm": 0.2551322147085001, |
|
"learning_rate": 8.656677781143394e-06, |
|
"loss": 0.3865, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 3.612968591691996, |
|
"grad_norm": 0.25594419535685065, |
|
"learning_rate": 8.609976883624377e-06, |
|
"loss": 0.3824, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 3.617021276595745, |
|
"grad_norm": 0.2668346673433008, |
|
"learning_rate": 8.563367719785698e-06, |
|
"loss": 0.3564, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 3.6210739614994933, |
|
"grad_norm": 0.26069517385786256, |
|
"learning_rate": 8.516850665011138e-06, |
|
"loss": 0.3855, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 3.625126646403242, |
|
"grad_norm": 0.25569320363985437, |
|
"learning_rate": 8.47042609394269e-06, |
|
"loss": 0.3772, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 3.629179331306991, |
|
"grad_norm": 0.258067276079996, |
|
"learning_rate": 8.424094380477432e-06, |
|
"loss": 0.3823, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 3.63323201621074, |
|
"grad_norm": 0.2620702666969095, |
|
"learning_rate": 8.37785589776465e-06, |
|
"loss": 0.3783, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 3.6372847011144884, |
|
"grad_norm": 0.2726967173128483, |
|
"learning_rate": 8.331711018202694e-06, |
|
"loss": 0.381, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 3.641337386018237, |
|
"grad_norm": 0.259983151457623, |
|
"learning_rate": 8.285660113436104e-06, |
|
"loss": 0.3681, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 3.645390070921986, |
|
"grad_norm": 0.2622312817717958, |
|
"learning_rate": 8.239703554352527e-06, |
|
"loss": 0.3651, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 3.6494427558257345, |
|
"grad_norm": 0.25590741385747956, |
|
"learning_rate": 8.193841711079775e-06, |
|
"loss": 0.387, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 3.6534954407294835, |
|
"grad_norm": 0.27101692134935057, |
|
"learning_rate": 8.148074952982828e-06, |
|
"loss": 0.3671, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 3.657548125633232, |
|
"grad_norm": 0.2560869396492808, |
|
"learning_rate": 8.102403648660859e-06, |
|
"loss": 0.3858, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 3.6616008105369806, |
|
"grad_norm": 0.23509852953511742, |
|
"learning_rate": 8.056828165944282e-06, |
|
"loss": 0.3926, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 3.6656534954407296, |
|
"grad_norm": 0.24745891186561988, |
|
"learning_rate": 8.011348871891762e-06, |
|
"loss": 0.3971, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 3.669706180344478, |
|
"grad_norm": 0.2581252689769421, |
|
"learning_rate": 7.965966132787287e-06, |
|
"loss": 0.3928, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 3.673758865248227, |
|
"grad_norm": 0.25265013418438403, |
|
"learning_rate": 7.920680314137189e-06, |
|
"loss": 0.3862, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 3.6778115501519757, |
|
"grad_norm": 0.23624572949864894, |
|
"learning_rate": 7.875491780667246e-06, |
|
"loss": 0.3991, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 3.6818642350557242, |
|
"grad_norm": 0.27279624118736484, |
|
"learning_rate": 7.830400896319667e-06, |
|
"loss": 0.3829, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 3.6859169199594732, |
|
"grad_norm": 0.25203918482922044, |
|
"learning_rate": 7.785408024250259e-06, |
|
"loss": 0.3906, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 3.689969604863222, |
|
"grad_norm": 0.2293814254120697, |
|
"learning_rate": 7.74051352682542e-06, |
|
"loss": 0.4032, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 3.694022289766971, |
|
"grad_norm": 0.23877643679843694, |
|
"learning_rate": 7.695717765619257e-06, |
|
"loss": 0.391, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 3.6980749746707193, |
|
"grad_norm": 0.26103122293520664, |
|
"learning_rate": 7.651021101410673e-06, |
|
"loss": 0.3932, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 3.702127659574468, |
|
"grad_norm": 0.2545919331953898, |
|
"learning_rate": 7.606423894180464e-06, |
|
"loss": 0.3661, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 3.706180344478217, |
|
"grad_norm": 0.25485257631189995, |
|
"learning_rate": 7.56192650310839e-06, |
|
"loss": 0.3705, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 3.7102330293819654, |
|
"grad_norm": 0.2791055521595466, |
|
"learning_rate": 7.517529286570349e-06, |
|
"loss": 0.3785, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 3.7142857142857144, |
|
"grad_norm": 0.26615395865428915, |
|
"learning_rate": 7.473232602135387e-06, |
|
"loss": 0.3818, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 3.718338399189463, |
|
"grad_norm": 0.25261794890827705, |
|
"learning_rate": 7.429036806562935e-06, |
|
"loss": 0.3696, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 3.7223910840932115, |
|
"grad_norm": 0.24877986054887413, |
|
"learning_rate": 7.3849422557998455e-06, |
|
"loss": 0.396, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 3.7264437689969605, |
|
"grad_norm": 0.25119564908315795, |
|
"learning_rate": 7.340949304977567e-06, |
|
"loss": 0.3691, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 3.7304964539007095, |
|
"grad_norm": 0.23963904655600143, |
|
"learning_rate": 7.297058308409282e-06, |
|
"loss": 0.3853, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 3.734549138804458, |
|
"grad_norm": 0.23874508630645142, |
|
"learning_rate": 7.25326961958704e-06, |
|
"loss": 0.3979, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 3.7386018237082066, |
|
"grad_norm": 0.2613251252562162, |
|
"learning_rate": 7.209583591178921e-06, |
|
"loss": 0.361, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 3.742654508611955, |
|
"grad_norm": 0.247763757883156, |
|
"learning_rate": 7.1660005750261925e-06, |
|
"loss": 0.3905, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 3.746707193515704, |
|
"grad_norm": 0.25433935321193435, |
|
"learning_rate": 7.1225209221404765e-06, |
|
"loss": 0.3587, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 3.750759878419453, |
|
"grad_norm": 0.24161679896860355, |
|
"learning_rate": 7.079144982700909e-06, |
|
"loss": 0.3794, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 3.7548125633232017, |
|
"grad_norm": 0.24676946599970728, |
|
"learning_rate": 7.0358731060513695e-06, |
|
"loss": 0.3831, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 3.7588652482269502, |
|
"grad_norm": 0.24864272551566333, |
|
"learning_rate": 6.99270564069757e-06, |
|
"loss": 0.3559, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 3.762917933130699, |
|
"grad_norm": 0.2721763681367414, |
|
"learning_rate": 6.949642934304375e-06, |
|
"loss": 0.3794, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 3.7669706180344478, |
|
"grad_norm": 0.2555495781313021, |
|
"learning_rate": 6.906685333692871e-06, |
|
"loss": 0.3742, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 3.7710233029381968, |
|
"grad_norm": 0.24024176761244234, |
|
"learning_rate": 6.86383318483769e-06, |
|
"loss": 0.3747, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 3.7750759878419453, |
|
"grad_norm": 0.24739244511136232, |
|
"learning_rate": 6.821086832864139e-06, |
|
"loss": 0.3876, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 3.779128672745694, |
|
"grad_norm": 0.25229345464321506, |
|
"learning_rate": 6.77844662204546e-06, |
|
"loss": 0.399, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 3.783181357649443, |
|
"grad_norm": 0.23944201751332087, |
|
"learning_rate": 6.7359128958000455e-06, |
|
"loss": 0.4029, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 3.7872340425531914, |
|
"grad_norm": 0.25122823970264063, |
|
"learning_rate": 6.693485996688695e-06, |
|
"loss": 0.3698, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 3.7912867274569404, |
|
"grad_norm": 0.25961657067824834, |
|
"learning_rate": 6.651166266411801e-06, |
|
"loss": 0.3811, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 3.795339412360689, |
|
"grad_norm": 0.2502513717675922, |
|
"learning_rate": 6.6089540458066725e-06, |
|
"loss": 0.3864, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 3.7993920972644375, |
|
"grad_norm": 0.2610775403663525, |
|
"learning_rate": 6.566849674844711e-06, |
|
"loss": 0.3712, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 3.8034447821681865, |
|
"grad_norm": 0.247743927908705, |
|
"learning_rate": 6.524853492628747e-06, |
|
"loss": 0.3706, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 3.807497467071935, |
|
"grad_norm": 0.23974356395992896, |
|
"learning_rate": 6.4829658373902536e-06, |
|
"loss": 0.4033, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 3.811550151975684, |
|
"grad_norm": 0.25639644385453203, |
|
"learning_rate": 6.441187046486648e-06, |
|
"loss": 0.3845, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 3.8156028368794326, |
|
"grad_norm": 0.24478652216578706, |
|
"learning_rate": 6.399517456398567e-06, |
|
"loss": 0.384, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 3.819655521783181, |
|
"grad_norm": 0.24102927962772855, |
|
"learning_rate": 6.357957402727164e-06, |
|
"loss": 0.3852, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 3.82370820668693, |
|
"grad_norm": 0.24609536521299766, |
|
"learning_rate": 6.316507220191395e-06, |
|
"loss": 0.3911, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 3.8277608915906787, |
|
"grad_norm": 0.24020736597303688, |
|
"learning_rate": 6.275167242625331e-06, |
|
"loss": 0.3907, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 3.8318135764944277, |
|
"grad_norm": 0.2575315279870402, |
|
"learning_rate": 6.233937802975471e-06, |
|
"loss": 0.3933, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 3.835866261398176, |
|
"grad_norm": 0.25265995274165626, |
|
"learning_rate": 6.192819233298046e-06, |
|
"loss": 0.3734, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 3.8399189463019248, |
|
"grad_norm": 0.24215442344831176, |
|
"learning_rate": 6.151811864756383e-06, |
|
"loss": 0.3716, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 3.8439716312056738, |
|
"grad_norm": 0.253408968259172, |
|
"learning_rate": 6.1109160276181655e-06, |
|
"loss": 0.396, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 3.8480243161094227, |
|
"grad_norm": 1.3659760912520276, |
|
"learning_rate": 6.070132051252868e-06, |
|
"loss": 0.4565, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 3.8520770010131713, |
|
"grad_norm": 0.23365812349180248, |
|
"learning_rate": 6.0294602641290034e-06, |
|
"loss": 0.3779, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 3.85612968591692, |
|
"grad_norm": 0.23662609605247903, |
|
"learning_rate": 5.988900993811575e-06, |
|
"loss": 0.3765, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 3.860182370820669, |
|
"grad_norm": 0.2438744097200581, |
|
"learning_rate": 5.948454566959363e-06, |
|
"loss": 0.3833, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 3.8642350557244174, |
|
"grad_norm": 0.24384298517536226, |
|
"learning_rate": 5.908121309322328e-06, |
|
"loss": 0.3727, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 3.8682877406281664, |
|
"grad_norm": 0.23522797137541018, |
|
"learning_rate": 5.867901545738976e-06, |
|
"loss": 0.3895, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 3.872340425531915, |
|
"grad_norm": 0.2428423246864904, |
|
"learning_rate": 5.827795600133774e-06, |
|
"loss": 0.3717, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 3.8763931104356635, |
|
"grad_norm": 0.23184907736324284, |
|
"learning_rate": 5.787803795514466e-06, |
|
"loss": 0.3722, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 3.8804457953394125, |
|
"grad_norm": 0.22695505517851844, |
|
"learning_rate": 5.747926453969576e-06, |
|
"loss": 0.3944, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 3.884498480243161, |
|
"grad_norm": 0.25622634926368437, |
|
"learning_rate": 5.708163896665708e-06, |
|
"loss": 0.3698, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 3.88855116514691, |
|
"grad_norm": 0.2330703813197096, |
|
"learning_rate": 5.668516443845047e-06, |
|
"loss": 0.3787, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 3.8926038500506586, |
|
"grad_norm": 0.2453268239202265, |
|
"learning_rate": 5.6289844148227225e-06, |
|
"loss": 0.3632, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 3.896656534954407, |
|
"grad_norm": 0.26220693956586866, |
|
"learning_rate": 5.5895681279842615e-06, |
|
"loss": 0.377, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 3.900709219858156, |
|
"grad_norm": 0.24331804630697973, |
|
"learning_rate": 5.550267900783019e-06, |
|
"loss": 0.3718, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 3.9047619047619047, |
|
"grad_norm": 0.22770359856100209, |
|
"learning_rate": 5.511084049737623e-06, |
|
"loss": 0.3798, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 3.9088145896656536, |
|
"grad_norm": 0.2428561035827692, |
|
"learning_rate": 5.4720168904294215e-06, |
|
"loss": 0.3854, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 3.912867274569402, |
|
"grad_norm": 0.2611971830968991, |
|
"learning_rate": 5.433066737499948e-06, |
|
"loss": 0.3769, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 3.9169199594731507, |
|
"grad_norm": 0.2406934295793003, |
|
"learning_rate": 5.394233904648376e-06, |
|
"loss": 0.3654, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 3.9209726443768997, |
|
"grad_norm": 0.22090493999761748, |
|
"learning_rate": 5.355518704628997e-06, |
|
"loss": 0.3815, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 3.9250253292806483, |
|
"grad_norm": 0.24259711573794973, |
|
"learning_rate": 5.316921449248731e-06, |
|
"loss": 0.3785, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 3.9290780141843973, |
|
"grad_norm": 1.5430490430588688, |
|
"learning_rate": 5.278442449364538e-06, |
|
"loss": 0.383, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 3.933130699088146, |
|
"grad_norm": 0.24031238951607808, |
|
"learning_rate": 5.240082014881016e-06, |
|
"loss": 0.3976, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 3.9371833839918944, |
|
"grad_norm": 0.2437610986619112, |
|
"learning_rate": 5.201840454747822e-06, |
|
"loss": 0.3696, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 3.9412360688956434, |
|
"grad_norm": 0.22082397830342182, |
|
"learning_rate": 5.163718076957223e-06, |
|
"loss": 0.397, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 3.9452887537993924, |
|
"grad_norm": 0.300049065779442, |
|
"learning_rate": 5.125715188541609e-06, |
|
"loss": 0.3745, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 3.949341438703141, |
|
"grad_norm": 0.24960191240344437, |
|
"learning_rate": 5.087832095571021e-06, |
|
"loss": 0.3595, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 3.9533941236068895, |
|
"grad_norm": 0.23260955229232055, |
|
"learning_rate": 5.0500691031506766e-06, |
|
"loss": 0.3854, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 3.9574468085106385, |
|
"grad_norm": 0.2347783236855166, |
|
"learning_rate": 5.01242651541854e-06, |
|
"loss": 0.3778, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 3.961499493414387, |
|
"grad_norm": 0.24120605951175264, |
|
"learning_rate": 4.974904635542815e-06, |
|
"loss": 0.3767, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 3.965552178318136, |
|
"grad_norm": 0.2562211408097052, |
|
"learning_rate": 4.937503765719582e-06, |
|
"loss": 0.3729, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 3.9696048632218845, |
|
"grad_norm": 0.2291642976443571, |
|
"learning_rate": 4.900224207170299e-06, |
|
"loss": 0.3786, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 3.973657548125633, |
|
"grad_norm": 0.24761238539181818, |
|
"learning_rate": 4.8630662601394065e-06, |
|
"loss": 0.3981, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 3.977710233029382, |
|
"grad_norm": 0.2307968561015442, |
|
"learning_rate": 4.8260302238918995e-06, |
|
"loss": 0.3677, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 3.9817629179331306, |
|
"grad_norm": 0.22523116284897532, |
|
"learning_rate": 4.789116396710924e-06, |
|
"loss": 0.3849, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 3.9858156028368796, |
|
"grad_norm": 0.23670569985444856, |
|
"learning_rate": 4.752325075895368e-06, |
|
"loss": 0.3701, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 3.989868287740628, |
|
"grad_norm": 0.2470731180414348, |
|
"learning_rate": 4.715656557757473e-06, |
|
"loss": 0.3654, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 3.9939209726443767, |
|
"grad_norm": 0.6229189248354251, |
|
"learning_rate": 4.679111137620442e-06, |
|
"loss": 0.397, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 3.9979736575481257, |
|
"grad_norm": 0.2460543297899704, |
|
"learning_rate": 4.6426891098160585e-06, |
|
"loss": 0.3777, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 4.0040526849037485, |
|
"grad_norm": 0.43441965883150363, |
|
"learning_rate": 4.6063907676823474e-06, |
|
"loss": 0.3207, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 4.008105369807497, |
|
"grad_norm": 0.3172185792272455, |
|
"learning_rate": 4.570216403561141e-06, |
|
"loss": 0.3441, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 4.0121580547112465, |
|
"grad_norm": 0.2559676560011084, |
|
"learning_rate": 4.534166308795815e-06, |
|
"loss": 0.361, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 4.016210739614995, |
|
"grad_norm": 0.5003179698215972, |
|
"learning_rate": 4.498240773728859e-06, |
|
"loss": 0.3313, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 4.020263424518744, |
|
"grad_norm": 0.39127556056111495, |
|
"learning_rate": 4.462440087699609e-06, |
|
"loss": 0.3437, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 4.024316109422492, |
|
"grad_norm": 0.2923205642374672, |
|
"learning_rate": 4.426764539041861e-06, |
|
"loss": 0.3312, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 4.028368794326241, |
|
"grad_norm": 0.34303442581156296, |
|
"learning_rate": 4.391214415081582e-06, |
|
"loss": 0.3339, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 4.03242147922999, |
|
"grad_norm": 0.37236892902762114, |
|
"learning_rate": 4.355790002134579e-06, |
|
"loss": 0.3317, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 4.036474164133739, |
|
"grad_norm": 0.29381660233429446, |
|
"learning_rate": 4.320491585504207e-06, |
|
"loss": 0.347, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 4.040526849037487, |
|
"grad_norm": 0.2828414471885096, |
|
"learning_rate": 4.2853194494790615e-06, |
|
"loss": 0.3428, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 4.044579533941236, |
|
"grad_norm": 0.3059883256410596, |
|
"learning_rate": 4.250273877330691e-06, |
|
"loss": 0.3205, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 4.048632218844984, |
|
"grad_norm": 0.35630036602168547, |
|
"learning_rate": 4.215355151311313e-06, |
|
"loss": 0.3457, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 4.052684903748734, |
|
"grad_norm": 0.3391856526917267, |
|
"learning_rate": 4.180563552651542e-06, |
|
"loss": 0.3428, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 4.056737588652482, |
|
"grad_norm": 0.26573757236833684, |
|
"learning_rate": 4.145899361558147e-06, |
|
"loss": 0.339, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 4.060790273556231, |
|
"grad_norm": 0.2637561449078847, |
|
"learning_rate": 4.111362857211738e-06, |
|
"loss": 0.3496, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 4.0648429584599794, |
|
"grad_norm": 0.3202643730230884, |
|
"learning_rate": 4.076954317764592e-06, |
|
"loss": 0.3259, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 4.068895643363729, |
|
"grad_norm": 0.2859231904760011, |
|
"learning_rate": 4.042674020338335e-06, |
|
"loss": 0.3594, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 4.072948328267477, |
|
"grad_norm": 0.2655163295044421, |
|
"learning_rate": 4.0085222410217835e-06, |
|
"loss": 0.3356, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 4.077001013171226, |
|
"grad_norm": 0.2567605286085258, |
|
"learning_rate": 3.974499254868674e-06, |
|
"loss": 0.3365, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 4.0810536980749745, |
|
"grad_norm": 0.29881391994396056, |
|
"learning_rate": 3.940605335895451e-06, |
|
"loss": 0.3191, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 4.085106382978723, |
|
"grad_norm": 0.28317244235385675, |
|
"learning_rate": 3.90684075707908e-06, |
|
"loss": 0.3338, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 4.0891590678824725, |
|
"grad_norm": 0.2727772100652722, |
|
"learning_rate": 3.8732057903548505e-06, |
|
"loss": 0.337, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 4.093211752786221, |
|
"grad_norm": 0.2411091975837039, |
|
"learning_rate": 3.8397007066141375e-06, |
|
"loss": 0.3317, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 4.09726443768997, |
|
"grad_norm": 0.39773060601877774, |
|
"learning_rate": 3.806325775702304e-06, |
|
"loss": 0.3216, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 4.101317122593718, |
|
"grad_norm": 0.23294668790624837, |
|
"learning_rate": 3.773081266416434e-06, |
|
"loss": 0.3432, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 4.105369807497467, |
|
"grad_norm": 0.25048276745175535, |
|
"learning_rate": 3.739967446503245e-06, |
|
"loss": 0.3537, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 4.109422492401216, |
|
"grad_norm": 0.23963348940540824, |
|
"learning_rate": 3.706984582656894e-06, |
|
"loss": 0.3502, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 4.113475177304965, |
|
"grad_norm": 0.23162128480111757, |
|
"learning_rate": 3.6741329405168237e-06, |
|
"loss": 0.3424, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 4.117527862208713, |
|
"grad_norm": 0.2714004979790431, |
|
"learning_rate": 3.641412784665648e-06, |
|
"loss": 0.3214, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 4.121580547112462, |
|
"grad_norm": 0.2446202152678492, |
|
"learning_rate": 3.608824378627005e-06, |
|
"loss": 0.3632, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 4.12563323201621, |
|
"grad_norm": 0.23320102692982828, |
|
"learning_rate": 3.5763679848634337e-06, |
|
"loss": 0.3268, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 4.12968591691996, |
|
"grad_norm": 0.22971961010867067, |
|
"learning_rate": 3.544043864774269e-06, |
|
"loss": 0.3146, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 4.133738601823708, |
|
"grad_norm": 0.2393940157111149, |
|
"learning_rate": 3.5118522786935282e-06, |
|
"loss": 0.3218, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 4.137791286727457, |
|
"grad_norm": 0.2366905046775755, |
|
"learning_rate": 3.479793485887819e-06, |
|
"loss": 0.3285, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 4.141843971631205, |
|
"grad_norm": 0.23327188316325834, |
|
"learning_rate": 3.4478677445542653e-06, |
|
"loss": 0.346, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 4.145896656534954, |
|
"grad_norm": 0.23805694262971164, |
|
"learning_rate": 3.4160753118183767e-06, |
|
"loss": 0.3201, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 4.149949341438703, |
|
"grad_norm": 0.21952183222157157, |
|
"learning_rate": 3.3844164437320527e-06, |
|
"loss": 0.3558, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 4.154002026342452, |
|
"grad_norm": 0.2367399283120628, |
|
"learning_rate": 3.3528913952714558e-06, |
|
"loss": 0.3375, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 4.1580547112462005, |
|
"grad_norm": 0.24076807751844387, |
|
"learning_rate": 3.321500420335e-06, |
|
"loss": 0.3581, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 4.162107396149949, |
|
"grad_norm": 0.2361755515108673, |
|
"learning_rate": 3.290243771741275e-06, |
|
"loss": 0.3234, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 4.1661600810536985, |
|
"grad_norm": 0.22473654665646817, |
|
"learning_rate": 3.2591217012270325e-06, |
|
"loss": 0.3413, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 4.170212765957447, |
|
"grad_norm": 0.22618701949927464, |
|
"learning_rate": 3.228134459445149e-06, |
|
"loss": 0.3442, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 4.174265450861196, |
|
"grad_norm": 0.229825739318835, |
|
"learning_rate": 3.1972822959626205e-06, |
|
"loss": 0.3377, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 4.178318135764944, |
|
"grad_norm": 0.238151569893642, |
|
"learning_rate": 3.166565459258513e-06, |
|
"loss": 0.33, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 4.182370820668693, |
|
"grad_norm": 0.22754261204629825, |
|
"learning_rate": 3.1359841967220193e-06, |
|
"loss": 0.3252, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 4.186423505572442, |
|
"grad_norm": 0.21644352401658654, |
|
"learning_rate": 3.105538754650419e-06, |
|
"loss": 0.3467, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 4.190476190476191, |
|
"grad_norm": 0.2222016041322117, |
|
"learning_rate": 3.07522937824712e-06, |
|
"loss": 0.3338, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 4.194528875379939, |
|
"grad_norm": 0.23395716092818578, |
|
"learning_rate": 3.0450563116196697e-06, |
|
"loss": 0.3178, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 4.198581560283688, |
|
"grad_norm": 0.22676626549455461, |
|
"learning_rate": 3.0150197977778008e-06, |
|
"loss": 0.3508, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 4.202634245187436, |
|
"grad_norm": 0.22253048027652203, |
|
"learning_rate": 2.985120078631465e-06, |
|
"loss": 0.3212, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 4.206686930091186, |
|
"grad_norm": 0.22580895403178536, |
|
"learning_rate": 2.9553573949888893e-06, |
|
"loss": 0.3391, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 4.210739614994934, |
|
"grad_norm": 0.22623805753396767, |
|
"learning_rate": 2.9257319865546384e-06, |
|
"loss": 0.3543, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 4.214792299898683, |
|
"grad_norm": 0.23771752218328732, |
|
"learning_rate": 2.896244091927678e-06, |
|
"loss": 0.3334, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 4.218844984802431, |
|
"grad_norm": 0.2175923691488866, |
|
"learning_rate": 2.8668939485994584e-06, |
|
"loss": 0.3413, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 4.22289766970618, |
|
"grad_norm": 0.22294735532990967, |
|
"learning_rate": 2.837681792951994e-06, |
|
"loss": 0.3498, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 4.226950354609929, |
|
"grad_norm": 0.22575799265879024, |
|
"learning_rate": 2.808607860255981e-06, |
|
"loss": 0.356, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 4.231003039513678, |
|
"grad_norm": 0.22390186791509342, |
|
"learning_rate": 2.7796723846688634e-06, |
|
"loss": 0.3464, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 4.2350557244174265, |
|
"grad_norm": 0.2372253372859002, |
|
"learning_rate": 2.7508755992329937e-06, |
|
"loss": 0.333, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 4.239108409321175, |
|
"grad_norm": 0.23263498450222925, |
|
"learning_rate": 2.722217735873718e-06, |
|
"loss": 0.3196, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 4.243161094224924, |
|
"grad_norm": 0.22082644683840363, |
|
"learning_rate": 2.6936990253975315e-06, |
|
"loss": 0.3429, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 4.247213779128673, |
|
"grad_norm": 0.21254303248638728, |
|
"learning_rate": 2.665319697490205e-06, |
|
"loss": 0.3398, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 4.251266464032422, |
|
"grad_norm": 0.2243713817826788, |
|
"learning_rate": 2.637079980714945e-06, |
|
"loss": 0.3514, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 4.25531914893617, |
|
"grad_norm": 0.23194174383328658, |
|
"learning_rate": 2.6089801025105453e-06, |
|
"loss": 0.3139, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 4.259371833839919, |
|
"grad_norm": 0.21504063264920145, |
|
"learning_rate": 2.581020289189571e-06, |
|
"loss": 0.3592, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 4.263424518743667, |
|
"grad_norm": 0.22028653603149462, |
|
"learning_rate": 2.553200765936501e-06, |
|
"loss": 0.3487, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 4.267477203647417, |
|
"grad_norm": 0.21704198667745012, |
|
"learning_rate": 2.525521756805962e-06, |
|
"loss": 0.3417, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 4.271529888551165, |
|
"grad_norm": 0.23369822909954296, |
|
"learning_rate": 2.497983484720885e-06, |
|
"loss": 0.3195, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 4.275582573454914, |
|
"grad_norm": 0.22063242924242107, |
|
"learning_rate": 2.470586171470728e-06, |
|
"loss": 0.3279, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 4.279635258358662, |
|
"grad_norm": 0.22141494083722552, |
|
"learning_rate": 2.4433300377096836e-06, |
|
"loss": 0.3405, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 4.283687943262412, |
|
"grad_norm": 0.219540341440935, |
|
"learning_rate": 2.4162153029549073e-06, |
|
"loss": 0.3408, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 4.28774062816616, |
|
"grad_norm": 0.23861957890867846, |
|
"learning_rate": 2.3892421855847458e-06, |
|
"loss": 0.3333, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 4.291793313069909, |
|
"grad_norm": 0.21548248088429095, |
|
"learning_rate": 2.362410902836978e-06, |
|
"loss": 0.3426, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 4.295845997973657, |
|
"grad_norm": 0.24508055932875364, |
|
"learning_rate": 2.3357216708070653e-06, |
|
"loss": 0.3153, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 4.299898682877406, |
|
"grad_norm": 0.2306722897699338, |
|
"learning_rate": 2.309174704446411e-06, |
|
"loss": 0.3208, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 4.303951367781155, |
|
"grad_norm": 0.23510473888692848, |
|
"learning_rate": 2.2827702175606437e-06, |
|
"loss": 0.3336, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 4.308004052684904, |
|
"grad_norm": 0.22997236852861927, |
|
"learning_rate": 2.256508422807855e-06, |
|
"loss": 0.3411, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 4.3120567375886525, |
|
"grad_norm": 0.2253232394674051, |
|
"learning_rate": 2.230389531696946e-06, |
|
"loss": 0.3369, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 4.316109422492401, |
|
"grad_norm": 0.2143512913097352, |
|
"learning_rate": 2.204413754585857e-06, |
|
"loss": 0.3418, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 4.32016210739615, |
|
"grad_norm": 0.22817923766265974, |
|
"learning_rate": 2.1785813006799406e-06, |
|
"loss": 0.3207, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 4.324214792299899, |
|
"grad_norm": 0.23139168272181188, |
|
"learning_rate": 2.1528923780302224e-06, |
|
"loss": 0.3216, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 4.328267477203648, |
|
"grad_norm": 0.23507424193237603, |
|
"learning_rate": 2.127347193531757e-06, |
|
"loss": 0.3515, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 4.332320162107396, |
|
"grad_norm": 0.23575923810648702, |
|
"learning_rate": 2.101945952921942e-06, |
|
"loss": 0.3274, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 4.336372847011145, |
|
"grad_norm": 0.22545259489057787, |
|
"learning_rate": 2.0766888607788906e-06, |
|
"loss": 0.312, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 4.340425531914893, |
|
"grad_norm": 0.23495567380127616, |
|
"learning_rate": 2.0515761205197337e-06, |
|
"loss": 0.3323, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 4.344478216818643, |
|
"grad_norm": 0.22098817493937745, |
|
"learning_rate": 2.0266079343990453e-06, |
|
"loss": 0.3464, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 4.348530901722391, |
|
"grad_norm": 0.21753324625510542, |
|
"learning_rate": 2.0017845035071494e-06, |
|
"loss": 0.3274, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 4.35258358662614, |
|
"grad_norm": 0.22625446678673608, |
|
"learning_rate": 1.9771060277685537e-06, |
|
"loss": 0.3154, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 4.356636271529888, |
|
"grad_norm": 0.23500122248011943, |
|
"learning_rate": 1.95257270594031e-06, |
|
"loss": 0.3405, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 4.360688956433638, |
|
"grad_norm": 0.2386139808225573, |
|
"learning_rate": 1.9281847356104188e-06, |
|
"loss": 0.33, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 4.364741641337386, |
|
"grad_norm": 0.23769347511234643, |
|
"learning_rate": 1.9039423131962365e-06, |
|
"loss": 0.3398, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 4.368794326241135, |
|
"grad_norm": 0.22501807224528134, |
|
"learning_rate": 1.8798456339429027e-06, |
|
"loss": 0.3356, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 4.372847011144883, |
|
"grad_norm": 0.22216700448089727, |
|
"learning_rate": 1.8558948919217612e-06, |
|
"loss": 0.3441, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 4.376899696048632, |
|
"grad_norm": 0.22077620576922963, |
|
"learning_rate": 1.8320902800287954e-06, |
|
"loss": 0.3368, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 4.380952380952381, |
|
"grad_norm": 0.21975774334133966, |
|
"learning_rate": 1.8084319899830726e-06, |
|
"loss": 0.3422, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 4.38500506585613, |
|
"grad_norm": 0.2427476217633319, |
|
"learning_rate": 1.7849202123252097e-06, |
|
"loss": 0.3472, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 4.3890577507598785, |
|
"grad_norm": 0.2238976923066399, |
|
"learning_rate": 1.7615551364158401e-06, |
|
"loss": 0.3311, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 4.393110435663627, |
|
"grad_norm": 0.22245634423159233, |
|
"learning_rate": 1.738336950434061e-06, |
|
"loss": 0.3425, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 4.397163120567376, |
|
"grad_norm": 0.2291645134335145, |
|
"learning_rate": 1.715265841375957e-06, |
|
"loss": 0.3182, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 4.401215805471125, |
|
"grad_norm": 0.2207581523272618, |
|
"learning_rate": 1.6923419950530684e-06, |
|
"loss": 0.3351, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 4.405268490374874, |
|
"grad_norm": 0.23169198562237595, |
|
"learning_rate": 1.6695655960909008e-06, |
|
"loss": 0.3038, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 4.409321175278622, |
|
"grad_norm": 0.22495310274089256, |
|
"learning_rate": 1.646936827927441e-06, |
|
"loss": 0.3411, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 4.413373860182371, |
|
"grad_norm": 0.22847221895158423, |
|
"learning_rate": 1.6244558728116766e-06, |
|
"loss": 0.333, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 4.417426545086119, |
|
"grad_norm": 0.22898838199855842, |
|
"learning_rate": 1.6021229118021265e-06, |
|
"loss": 0.3323, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 4.421479229989869, |
|
"grad_norm": 0.2216867202534966, |
|
"learning_rate": 1.5799381247653967e-06, |
|
"loss": 0.3372, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 4.425531914893617, |
|
"grad_norm": 0.22599597212969008, |
|
"learning_rate": 1.5579016903747013e-06, |
|
"loss": 0.346, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 4.429584599797366, |
|
"grad_norm": 0.23297686867498105, |
|
"learning_rate": 1.5360137861084656e-06, |
|
"loss": 0.3392, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 4.433637284701114, |
|
"grad_norm": 0.23207674307321688, |
|
"learning_rate": 1.5142745882488475e-06, |
|
"loss": 0.3423, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 4.437689969604863, |
|
"grad_norm": 0.2332743630255867, |
|
"learning_rate": 1.4926842718803691e-06, |
|
"loss": 0.3412, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 4.441742654508612, |
|
"grad_norm": 0.2168993664129981, |
|
"learning_rate": 1.4712430108884657e-06, |
|
"loss": 0.3533, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 4.445795339412361, |
|
"grad_norm": 0.2101188856783912, |
|
"learning_rate": 1.4499509779581078e-06, |
|
"loss": 0.3349, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 4.449848024316109, |
|
"grad_norm": 0.2194823348902552, |
|
"learning_rate": 1.4288083445723988e-06, |
|
"loss": 0.3084, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 4.453900709219858, |
|
"grad_norm": 0.23080580647339557, |
|
"learning_rate": 1.4078152810112045e-06, |
|
"loss": 0.3266, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 4.4579533941236065, |
|
"grad_norm": 0.21833585919702747, |
|
"learning_rate": 1.3869719563497697e-06, |
|
"loss": 0.3424, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 4.462006079027356, |
|
"grad_norm": 0.22387352091239793, |
|
"learning_rate": 1.3662785384573663e-06, |
|
"loss": 0.3425, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 4.4660587639311045, |
|
"grad_norm": 0.2249058767071061, |
|
"learning_rate": 1.3457351939959383e-06, |
|
"loss": 0.3396, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 4.470111448834853, |
|
"grad_norm": 0.23870232680622352, |
|
"learning_rate": 1.3253420884187551e-06, |
|
"loss": 0.3405, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 4.474164133738602, |
|
"grad_norm": 0.21607513672934625, |
|
"learning_rate": 1.3050993859690953e-06, |
|
"loss": 0.3328, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 4.47821681864235, |
|
"grad_norm": 0.22535998908174948, |
|
"learning_rate": 1.2850072496788869e-06, |
|
"loss": 0.3554, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 4.4822695035460995, |
|
"grad_norm": 0.21671387099938133, |
|
"learning_rate": 1.2650658413674434e-06, |
|
"loss": 0.35, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 4.486322188449848, |
|
"grad_norm": 0.22130453377452314, |
|
"learning_rate": 1.2452753216401226e-06, |
|
"loss": 0.3334, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 4.490374873353597, |
|
"grad_norm": 0.2404688922288609, |
|
"learning_rate": 1.2256358498870503e-06, |
|
"loss": 0.3433, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 4.494427558257345, |
|
"grad_norm": 0.22385795363554434, |
|
"learning_rate": 1.2061475842818337e-06, |
|
"loss": 0.3461, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 4.498480243161095, |
|
"grad_norm": 0.22413303912779692, |
|
"learning_rate": 1.1868106817802816e-06, |
|
"loss": 0.3436, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 4.502532928064843, |
|
"grad_norm": 0.22122645891361573, |
|
"learning_rate": 1.1676252981191482e-06, |
|
"loss": 0.3338, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 4.506585612968592, |
|
"grad_norm": 0.21376741367814048, |
|
"learning_rate": 1.1485915878148823e-06, |
|
"loss": 0.3635, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 4.51063829787234, |
|
"grad_norm": 0.22603242927238207, |
|
"learning_rate": 1.1297097041623584e-06, |
|
"loss": 0.3223, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 4.514690982776089, |
|
"grad_norm": 0.2331149933292654, |
|
"learning_rate": 1.1109797992336847e-06, |
|
"loss": 0.3258, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 4.518743667679838, |
|
"grad_norm": 0.23959865419619877, |
|
"learning_rate": 1.092402023876933e-06, |
|
"loss": 0.3197, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 4.522796352583587, |
|
"grad_norm": 0.22749193255505235, |
|
"learning_rate": 1.0739765277149527e-06, |
|
"loss": 0.3426, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 4.526849037487335, |
|
"grad_norm": 0.22796993048828815, |
|
"learning_rate": 1.0557034591441596e-06, |
|
"loss": 0.3506, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 4.530901722391084, |
|
"grad_norm": 0.22831565877977422, |
|
"learning_rate": 1.0375829653333324e-06, |
|
"loss": 0.3277, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 4.5349544072948325, |
|
"grad_norm": 0.2203320741275949, |
|
"learning_rate": 1.0196151922224385e-06, |
|
"loss": 0.3494, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 4.539007092198582, |
|
"grad_norm": 0.21633254005013025, |
|
"learning_rate": 1.0018002845214526e-06, |
|
"loss": 0.3275, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 4.54305977710233, |
|
"grad_norm": 0.22961674015653205, |
|
"learning_rate": 9.841383857091947e-07, |
|
"loss": 0.3251, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 4.547112462006079, |
|
"grad_norm": 0.21671390434912122, |
|
"learning_rate": 9.666296380321616e-07, |
|
"loss": 0.343, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 4.5511651469098275, |
|
"grad_norm": 0.22603352068624985, |
|
"learning_rate": 9.492741825034124e-07, |
|
"loss": 0.3398, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 4.555217831813577, |
|
"grad_norm": 0.22069987043277506, |
|
"learning_rate": 9.320721589013892e-07, |
|
"loss": 0.3458, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 4.5592705167173255, |
|
"grad_norm": 0.23010691388882049, |
|
"learning_rate": 9.150237057688339e-07, |
|
"loss": 0.3203, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 4.563323201621074, |
|
"grad_norm": 0.2157397913050929, |
|
"learning_rate": 8.981289604116328e-07, |
|
"loss": 0.3397, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 4.567375886524823, |
|
"grad_norm": 0.21786115695116354, |
|
"learning_rate": 8.813880588977542e-07, |
|
"loss": 0.3319, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 4.571428571428571, |
|
"grad_norm": 0.20464643646357325, |
|
"learning_rate": 8.648011360561126e-07, |
|
"loss": 0.3439, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 4.575481256332321, |
|
"grad_norm": 0.20755239544430837, |
|
"learning_rate": 8.483683254755037e-07, |
|
"loss": 0.344, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 4.579533941236069, |
|
"grad_norm": 0.2089781380363568, |
|
"learning_rate": 8.320897595035227e-07, |
|
"loss": 0.35, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 4.583586626139818, |
|
"grad_norm": 0.22082440420011618, |
|
"learning_rate": 8.159655692455093e-07, |
|
"loss": 0.3095, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 4.587639311043566, |
|
"grad_norm": 0.2413705648165135, |
|
"learning_rate": 7.999958845634648e-07, |
|
"loss": 0.3247, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 4.591691995947315, |
|
"grad_norm": 0.24045589166324063, |
|
"learning_rate": 7.841808340750478e-07, |
|
"loss": 0.3071, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 4.595744680851064, |
|
"grad_norm": 0.22571645579981925, |
|
"learning_rate": 7.685205451524869e-07, |
|
"loss": 0.3435, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 4.599797365754813, |
|
"grad_norm": 0.20648467045148045, |
|
"learning_rate": 7.530151439216027e-07, |
|
"loss": 0.3521, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 4.603850050658561, |
|
"grad_norm": 0.21051011053555221, |
|
"learning_rate": 7.376647552607675e-07, |
|
"loss": 0.3427, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 4.60790273556231, |
|
"grad_norm": 0.217544136467324, |
|
"learning_rate": 7.224695027998963e-07, |
|
"loss": 0.3341, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 4.611955420466058, |
|
"grad_norm": 0.20640277762515988, |
|
"learning_rate": 7.07429508919466e-07, |
|
"loss": 0.3455, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 4.616008105369808, |
|
"grad_norm": 0.21797673582665247, |
|
"learning_rate": 6.925448947495206e-07, |
|
"loss": 0.3434, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 4.620060790273556, |
|
"grad_norm": 0.22022447802223266, |
|
"learning_rate": 6.778157801686936e-07, |
|
"loss": 0.3293, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 4.624113475177305, |
|
"grad_norm": 0.21263282274197012, |
|
"learning_rate": 6.632422838032515e-07, |
|
"loss": 0.3373, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 4.6281661600810535, |
|
"grad_norm": 0.214396956832954, |
|
"learning_rate": 6.488245230261281e-07, |
|
"loss": 0.3544, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 4.632218844984802, |
|
"grad_norm": 0.2148090104291169, |
|
"learning_rate": 6.345626139559868e-07, |
|
"loss": 0.3518, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 4.6362715298885515, |
|
"grad_norm": 0.22783424145149683, |
|
"learning_rate": 6.204566714562866e-07, |
|
"loss": 0.333, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 4.6403242147923, |
|
"grad_norm": 0.21532478922335205, |
|
"learning_rate": 6.06506809134344e-07, |
|
"loss": 0.3493, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 4.644376899696049, |
|
"grad_norm": 0.22297481495250232, |
|
"learning_rate": 5.927131393404373e-07, |
|
"loss": 0.3396, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 4.648429584599797, |
|
"grad_norm": 0.22299241012230464, |
|
"learning_rate": 5.790757731668817e-07, |
|
"loss": 0.3402, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 4.652482269503546, |
|
"grad_norm": 0.22405458848750587, |
|
"learning_rate": 5.655948204471507e-07, |
|
"loss": 0.3389, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 4.656534954407295, |
|
"grad_norm": 0.2059481832033599, |
|
"learning_rate": 5.522703897549875e-07, |
|
"loss": 0.363, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 4.660587639311044, |
|
"grad_norm": 0.22406832732174456, |
|
"learning_rate": 5.391025884035239e-07, |
|
"loss": 0.3192, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 4.664640324214792, |
|
"grad_norm": 0.2123157382455703, |
|
"learning_rate": 5.260915224444207e-07, |
|
"loss": 0.3177, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 4.668693009118541, |
|
"grad_norm": 0.21214183161863634, |
|
"learning_rate": 5.132372966670129e-07, |
|
"loss": 0.3428, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 4.672745694022289, |
|
"grad_norm": 0.22214217040509063, |
|
"learning_rate": 5.005400145974704e-07, |
|
"loss": 0.3228, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 4.676798378926039, |
|
"grad_norm": 0.22206843636459048, |
|
"learning_rate": 4.879997784979562e-07, |
|
"loss": 0.3128, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 4.680851063829787, |
|
"grad_norm": 0.22502942651648752, |
|
"learning_rate": 4.7561668936580984e-07, |
|
"loss": 0.2986, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 4.684903748733536, |
|
"grad_norm": 0.20286793667111946, |
|
"learning_rate": 4.6339084693272306e-07, |
|
"loss": 0.3457, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 4.688956433637284, |
|
"grad_norm": 0.21521927202873165, |
|
"learning_rate": 4.5132234966395847e-07, |
|
"loss": 0.3333, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 4.693009118541033, |
|
"grad_norm": 0.22798314042818463, |
|
"learning_rate": 4.3941129475752795e-07, |
|
"loss": 0.3297, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 4.697061803444782, |
|
"grad_norm": 0.21904215857665196, |
|
"learning_rate": 4.27657778143431e-07, |
|
"loss": 0.3436, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 4.701114488348531, |
|
"grad_norm": 0.22182735929955658, |
|
"learning_rate": 4.1606189448287757e-07, |
|
"loss": 0.3314, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 4.7051671732522795, |
|
"grad_norm": 0.2095186956955109, |
|
"learning_rate": 4.046237371675177e-07, |
|
"loss": 0.3347, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 4.709219858156028, |
|
"grad_norm": 0.22442127204518006, |
|
"learning_rate": 3.9334339831869963e-07, |
|
"loss": 0.3359, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 4.713272543059777, |
|
"grad_norm": 0.21684324027495205, |
|
"learning_rate": 3.8222096878671955e-07, |
|
"loss": 0.3373, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 4.717325227963526, |
|
"grad_norm": 0.2107650959337461, |
|
"learning_rate": 3.7125653815009545e-07, |
|
"loss": 0.3294, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 4.721377912867275, |
|
"grad_norm": 0.20655868278548511, |
|
"learning_rate": 3.6045019471484974e-07, |
|
"loss": 0.3589, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 4.725430597771023, |
|
"grad_norm": 0.21931130102701069, |
|
"learning_rate": 3.498020255137813e-07, |
|
"loss": 0.3214, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 4.729483282674772, |
|
"grad_norm": 0.22416747292732309, |
|
"learning_rate": 3.393121163057811e-07, |
|
"loss": 0.3153, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 4.733535967578521, |
|
"grad_norm": 0.21689993421909323, |
|
"learning_rate": 3.289805515751399e-07, |
|
"loss": 0.3353, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 4.73758865248227, |
|
"grad_norm": 0.22352148295492932, |
|
"learning_rate": 3.188074145308573e-07, |
|
"loss": 0.3325, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 4.741641337386018, |
|
"grad_norm": 0.21260809903189817, |
|
"learning_rate": 3.087927871059804e-07, |
|
"loss": 0.3392, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 4.745694022289767, |
|
"grad_norm": 0.21638797232716142, |
|
"learning_rate": 2.989367499569418e-07, |
|
"loss": 0.3459, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 4.749746707193515, |
|
"grad_norm": 0.22534975566915338, |
|
"learning_rate": 2.8923938246290917e-07, |
|
"loss": 0.3265, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 4.753799392097265, |
|
"grad_norm": 0.21321198039942968, |
|
"learning_rate": 2.7970076272514804e-07, |
|
"loss": 0.3308, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 4.757852077001013, |
|
"grad_norm": 0.22012878527457655, |
|
"learning_rate": 2.703209675663887e-07, |
|
"loss": 0.3522, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 4.761904761904762, |
|
"grad_norm": 0.21029964979907614, |
|
"learning_rate": 2.6110007253021374e-07, |
|
"loss": 0.3366, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 4.76595744680851, |
|
"grad_norm": 0.20580076966125707, |
|
"learning_rate": 2.520381518804471e-07, |
|
"loss": 0.3541, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 4.77001013171226, |
|
"grad_norm": 0.2134745021807859, |
|
"learning_rate": 2.4313527860054585e-07, |
|
"loss": 0.3193, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 4.774062816616008, |
|
"grad_norm": 0.22585064205156016, |
|
"learning_rate": 2.343915243930317e-07, |
|
"loss": 0.3252, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 4.778115501519757, |
|
"grad_norm": 0.21603823512286785, |
|
"learning_rate": 2.2580695967889367e-07, |
|
"loss": 0.348, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 4.7821681864235055, |
|
"grad_norm": 0.21653647329587522, |
|
"learning_rate": 2.1738165359704189e-07, |
|
"loss": 0.3204, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 4.786220871327254, |
|
"grad_norm": 0.21205769892470547, |
|
"learning_rate": 2.0911567400373257e-07, |
|
"loss": 0.3353, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 4.7902735562310035, |
|
"grad_norm": 0.21549336304459712, |
|
"learning_rate": 2.0100908747202607e-07, |
|
"loss": 0.346, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 4.794326241134752, |
|
"grad_norm": 0.21059046847754115, |
|
"learning_rate": 1.9306195929125638e-07, |
|
"loss": 0.3306, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 4.798378926038501, |
|
"grad_norm": 0.22735381874544033, |
|
"learning_rate": 1.8527435346650247e-07, |
|
"loss": 0.3157, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 4.802431610942249, |
|
"grad_norm": 0.2198041799958636, |
|
"learning_rate": 1.7764633271807108e-07, |
|
"loss": 0.3316, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 4.806484295845998, |
|
"grad_norm": 0.21349275524365252, |
|
"learning_rate": 1.7017795848099262e-07, |
|
"loss": 0.3382, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 4.810536980749747, |
|
"grad_norm": 0.20634207760035184, |
|
"learning_rate": 1.6286929090452596e-07, |
|
"loss": 0.3595, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 4.814589665653496, |
|
"grad_norm": 0.2149452566773372, |
|
"learning_rate": 1.557203888516745e-07, |
|
"loss": 0.3254, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 4.818642350557244, |
|
"grad_norm": 0.2050290266477185, |
|
"learning_rate": 1.487313098987131e-07, |
|
"loss": 0.3486, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 4.822695035460993, |
|
"grad_norm": 0.21730349274723942, |
|
"learning_rate": 1.4190211033472402e-07, |
|
"loss": 0.3138, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 4.826747720364741, |
|
"grad_norm": 0.2138676861848332, |
|
"learning_rate": 1.3523284516113955e-07, |
|
"loss": 0.3143, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 4.830800405268491, |
|
"grad_norm": 0.21788352088186105, |
|
"learning_rate": 1.2872356809130682e-07, |
|
"loss": 0.3216, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 4.834853090172239, |
|
"grad_norm": 0.2265025016281195, |
|
"learning_rate": 1.2237433155004807e-07, |
|
"loss": 0.3134, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 4.838905775075988, |
|
"grad_norm": 0.22179359558190445, |
|
"learning_rate": 1.1618518667323886e-07, |
|
"loss": 0.34, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 4.842958459979736, |
|
"grad_norm": 0.2259526528670437, |
|
"learning_rate": 1.1015618330740385e-07, |
|
"loss": 0.3226, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 4.847011144883485, |
|
"grad_norm": 0.22281249929212246, |
|
"learning_rate": 1.042873700093061e-07, |
|
"loss": 0.3367, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 4.851063829787234, |
|
"grad_norm": 0.20646844314830884, |
|
"learning_rate": 9.857879404556291e-08, |
|
"loss": 0.3516, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 4.855116514690983, |
|
"grad_norm": 0.20917766627740703, |
|
"learning_rate": 9.303050139225722e-08, |
|
"loss": 0.3445, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 4.8591691995947315, |
|
"grad_norm": 0.21438077777876302, |
|
"learning_rate": 8.76425367345779e-08, |
|
"loss": 0.348, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 4.86322188449848, |
|
"grad_norm": 0.21386021572345543, |
|
"learning_rate": 8.241494346644897e-08, |
|
"loss": 0.3438, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 4.867274569402229, |
|
"grad_norm": 0.2219330645543717, |
|
"learning_rate": 7.734776369019204e-08, |
|
"loss": 0.3293, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 4.871327254305978, |
|
"grad_norm": 0.21913258808872252, |
|
"learning_rate": 7.244103821617332e-08, |
|
"loss": 0.3483, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 4.875379939209727, |
|
"grad_norm": 0.2046396443128444, |
|
"learning_rate": 6.769480656248606e-08, |
|
"loss": 0.3666, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 4.879432624113475, |
|
"grad_norm": 0.2235359374333409, |
|
"learning_rate": 6.310910695462635e-08, |
|
"loss": 0.3498, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 4.883485309017224, |
|
"grad_norm": 0.21714147168119471, |
|
"learning_rate": 5.8683976325191185e-08, |
|
"loss": 0.3255, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 4.887537993920972, |
|
"grad_norm": 0.20965957836909663, |
|
"learning_rate": 5.4419450313571984e-08, |
|
"loss": 0.3388, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 4.891590678824722, |
|
"grad_norm": 0.21025007654825317, |
|
"learning_rate": 5.031556326567488e-08, |
|
"loss": 0.3325, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 4.89564336372847, |
|
"grad_norm": 0.22045886448240218, |
|
"learning_rate": 4.637234823364312e-08, |
|
"loss": 0.3395, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 4.899696048632219, |
|
"grad_norm": 0.2216809920246643, |
|
"learning_rate": 4.258983697558838e-08, |
|
"loss": 0.313, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 4.903748733535967, |
|
"grad_norm": 0.21919342892553204, |
|
"learning_rate": 3.896805995533548e-08, |
|
"loss": 0.3327, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 4.907801418439716, |
|
"grad_norm": 0.22963652477786461, |
|
"learning_rate": 3.550704634218028e-08, |
|
"loss": 0.3243, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 4.911854103343465, |
|
"grad_norm": 0.21024186093097164, |
|
"learning_rate": 3.2206824010647676e-08, |
|
"loss": 0.3526, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 4.915906788247214, |
|
"grad_norm": 0.22336999236382776, |
|
"learning_rate": 2.9067419540278476e-08, |
|
"loss": 0.3265, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 4.919959473150962, |
|
"grad_norm": 0.2201397513245513, |
|
"learning_rate": 2.6088858215400638e-08, |
|
"loss": 0.3363, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 4.924012158054711, |
|
"grad_norm": 0.22111502149214282, |
|
"learning_rate": 2.3271164024940564e-08, |
|
"loss": 0.3179, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 4.92806484295846, |
|
"grad_norm": 0.22260370419361142, |
|
"learning_rate": 2.061435966221881e-08, |
|
"loss": 0.333, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 4.932117527862209, |
|
"grad_norm": 0.21061532746297282, |
|
"learning_rate": 1.811846652477245e-08, |
|
"loss": 0.3368, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 4.9361702127659575, |
|
"grad_norm": 0.2324705516186909, |
|
"learning_rate": 1.5783504714184106e-08, |
|
"loss": 0.3124, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 4.940222897669706, |
|
"grad_norm": 0.21503925446600383, |
|
"learning_rate": 1.360949303591097e-08, |
|
"loss": 0.3327, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 4.944275582573455, |
|
"grad_norm": 0.21453766341928934, |
|
"learning_rate": 1.1596448999144916e-08, |
|
"loss": 0.329, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 4.948328267477204, |
|
"grad_norm": 0.21274947142888392, |
|
"learning_rate": 9.744388816668172e-09, |
|
"loss": 0.3356, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 4.9523809523809526, |
|
"grad_norm": 0.21256060764333481, |
|
"learning_rate": 8.05332740472009e-09, |
|
"loss": 0.3208, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 4.956433637284701, |
|
"grad_norm": 0.2162259550966047, |
|
"learning_rate": 6.523278382872811e-09, |
|
"loss": 0.3329, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 4.96048632218845, |
|
"grad_norm": 0.21256351533196707, |
|
"learning_rate": 5.15425407393133e-09, |
|
"loss": 0.3527, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 4.964539007092198, |
|
"grad_norm": 0.2142136871122153, |
|
"learning_rate": 3.94626550383137e-09, |
|
"loss": 0.3372, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 4.968591691995948, |
|
"grad_norm": 0.20876176461532178, |
|
"learning_rate": 2.899322401546112e-09, |
|
"loss": 0.3445, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 4.972644376899696, |
|
"grad_norm": 0.21723609682700945, |
|
"learning_rate": 2.013433199010706e-09, |
|
"loss": 0.3397, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 4.976697061803445, |
|
"grad_norm": 0.21289286076311653, |
|
"learning_rate": 1.2886050310556563e-09, |
|
"loss": 0.3297, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 4.980749746707193, |
|
"grad_norm": 0.22390735712541313, |
|
"learning_rate": 7.248437353468695e-10, |
|
"loss": 0.3258, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 4.984802431610943, |
|
"grad_norm": 0.23301239046115135, |
|
"learning_rate": 3.221538523412449e-10, |
|
"loss": 0.353, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 4.988855116514691, |
|
"grad_norm": 0.21838384856465362, |
|
"learning_rate": 8.053862524670663e-11, |
|
"loss": 0.353, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 4.99290780141844, |
|
"grad_norm": 0.23683621287256168, |
|
"learning_rate": 0.0, |
|
"loss": 0.3136, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 4.99290780141844, |
|
"step": 1230, |
|
"total_flos": 4.845566732934316e+18, |
|
"train_loss": 0.46200237676380124, |
|
"train_runtime": 53793.4177, |
|
"train_samples_per_second": 2.935, |
|
"train_steps_per_second": 0.023 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1230, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 5, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 4.845566732934316e+18, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|