{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 6.666666666666667,
  "eval_steps": 500,
  "global_step": 70,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.09523809523809523,
      "grad_norm": 6.5314817126396605,
      "learning_rate": 2.8571428571428573e-06,
      "loss": 1.039,
      "step": 1
    },
    {
      "epoch": 0.19047619047619047,
      "grad_norm": 6.411226211105682,
      "learning_rate": 5.7142857142857145e-06,
      "loss": 1.007,
      "step": 2
    },
    {
      "epoch": 0.2857142857142857,
      "grad_norm": 6.03133238655822,
      "learning_rate": 8.571428571428571e-06,
      "loss": 1.0241,
      "step": 3
    },
    {
      "epoch": 0.38095238095238093,
      "grad_norm": 2.7614466505709547,
      "learning_rate": 1.1428571428571429e-05,
      "loss": 0.9256,
      "step": 4
    },
    {
      "epoch": 0.47619047619047616,
      "grad_norm": 4.229667625274101,
      "learning_rate": 1.4285714285714287e-05,
      "loss": 0.9246,
      "step": 5
    },
    {
      "epoch": 0.5714285714285714,
      "grad_norm": 4.271242611972377,
      "learning_rate": 1.7142857142857142e-05,
      "loss": 0.9244,
      "step": 6
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 4.175033590443489,
      "learning_rate": 2e-05,
      "loss": 0.9028,
      "step": 7
    },
    {
      "epoch": 0.7619047619047619,
      "grad_norm": 2.987240488063728,
      "learning_rate": 1.9987569212189224e-05,
      "loss": 0.8658,
      "step": 8
    },
    {
      "epoch": 0.8571428571428571,
      "grad_norm": 2.2208403898960354,
      "learning_rate": 1.9950307753654016e-05,
      "loss": 0.8271,
      "step": 9
    },
    {
      "epoch": 0.9523809523809523,
      "grad_norm": 1.6368209966441267,
      "learning_rate": 1.9888308262251286e-05,
      "loss": 0.8045,
      "step": 10
    },
    {
      "epoch": 1.0476190476190477,
      "grad_norm": 1.7407139578415196,
      "learning_rate": 1.9801724878485438e-05,
      "loss": 1.1541,
      "step": 11
    },
    {
      "epoch": 1.1428571428571428,
      "grad_norm": 1.4903732825489864,
      "learning_rate": 1.969077286229078e-05,
      "loss": 0.8183,
      "step": 12
    },
    {
      "epoch": 1.2380952380952381,
      "grad_norm": 1.1387314756946674,
      "learning_rate": 1.955572805786141e-05,
      "loss": 0.7249,
      "step": 13
    },
    {
      "epoch": 1.3333333333333333,
      "grad_norm": 0.9622986586737649,
      "learning_rate": 1.9396926207859085e-05,
      "loss": 0.6702,
      "step": 14
    },
    {
      "epoch": 1.4285714285714286,
      "grad_norm": 1.202437985998727,
      "learning_rate": 1.921476211870408e-05,
      "loss": 0.697,
      "step": 15
    },
    {
      "epoch": 1.5238095238095237,
      "grad_norm": 1.055134927131131,
      "learning_rate": 1.900968867902419e-05,
      "loss": 0.7302,
      "step": 16
    },
    {
      "epoch": 1.619047619047619,
      "grad_norm": 0.8266818011748471,
      "learning_rate": 1.8782215733702286e-05,
      "loss": 0.6992,
      "step": 17
    },
    {
      "epoch": 1.7142857142857144,
      "grad_norm": 1.0098700924964996,
      "learning_rate": 1.8532908816321557e-05,
      "loss": 0.6992,
      "step": 18
    },
    {
      "epoch": 1.8095238095238095,
      "grad_norm": 0.9110314401306685,
      "learning_rate": 1.826238774315995e-05,
      "loss": 0.7385,
      "step": 19
    },
    {
      "epoch": 1.9047619047619047,
      "grad_norm": 0.7293179813266315,
      "learning_rate": 1.7971325072229227e-05,
      "loss": 0.6771,
      "step": 20
    },
    {
      "epoch": 2.0,
      "grad_norm": 1.4116554460831903,
      "learning_rate": 1.766044443118978e-05,
      "loss": 1.036,
      "step": 21
    },
    {
      "epoch": 2.0952380952380953,
      "grad_norm": 0.6717927696411902,
      "learning_rate": 1.7330518718298263e-05,
      "loss": 0.6562,
      "step": 22
    },
    {
      "epoch": 2.1904761904761907,
      "grad_norm": 0.7856660243507388,
      "learning_rate": 1.698236818086073e-05,
      "loss": 0.6406,
      "step": 23
    },
    {
      "epoch": 2.2857142857142856,
      "grad_norm": 0.6907398105902497,
      "learning_rate": 1.6616858375968596e-05,
      "loss": 0.6382,
      "step": 24
    },
    {
      "epoch": 2.380952380952381,
      "grad_norm": 0.578023955651648,
      "learning_rate": 1.6234898018587336e-05,
      "loss": 0.6324,
      "step": 25
    },
    {
      "epoch": 2.4761904761904763,
      "grad_norm": 0.7057408759686685,
      "learning_rate": 1.5837436722347902e-05,
      "loss": 0.626,
      "step": 26
    },
    {
      "epoch": 2.571428571428571,
      "grad_norm": 0.6695515982268823,
      "learning_rate": 1.5425462638657597e-05,
      "loss": 0.6271,
      "step": 27
    },
    {
      "epoch": 2.6666666666666665,
      "grad_norm": 0.5483892523701176,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.6146,
      "step": 28
    },
    {
      "epoch": 2.761904761904762,
      "grad_norm": 0.5791212339845703,
      "learning_rate": 1.4562106573531632e-05,
      "loss": 0.6008,
      "step": 29
    },
    {
      "epoch": 2.857142857142857,
      "grad_norm": 0.6227576341749919,
      "learning_rate": 1.4112871031306118e-05,
      "loss": 0.6045,
      "step": 30
    },
    {
      "epoch": 2.9523809523809526,
      "grad_norm": 0.56328099334905,
      "learning_rate": 1.3653410243663953e-05,
      "loss": 0.5922,
      "step": 31
    },
    {
      "epoch": 3.0476190476190474,
      "grad_norm": 0.917071302944978,
      "learning_rate": 1.3184866502516846e-05,
      "loss": 0.8831,
      "step": 32
    },
    {
      "epoch": 3.142857142857143,
      "grad_norm": 0.47392367492469706,
      "learning_rate": 1.2708404681430054e-05,
      "loss": 0.5957,
      "step": 33
    },
    {
      "epoch": 3.238095238095238,
      "grad_norm": 0.6220069709358489,
      "learning_rate": 1.2225209339563144e-05,
      "loss": 0.5322,
      "step": 34
    },
    {
      "epoch": 3.3333333333333335,
      "grad_norm": 0.44117649459601027,
      "learning_rate": 1.1736481776669307e-05,
      "loss": 0.574,
      "step": 35
    },
    {
      "epoch": 3.4285714285714284,
      "grad_norm": 0.5000691267266321,
      "learning_rate": 1.1243437046474854e-05,
      "loss": 0.5403,
      "step": 36
    },
    {
      "epoch": 3.5238095238095237,
      "grad_norm": 0.5471569488938381,
      "learning_rate": 1.0747300935864245e-05,
      "loss": 0.5499,
      "step": 37
    },
    {
      "epoch": 3.619047619047619,
      "grad_norm": 0.5206224380615669,
      "learning_rate": 1.0249306917380731e-05,
      "loss": 0.5704,
      "step": 38
    },
    {
      "epoch": 3.7142857142857144,
      "grad_norm": 0.4992983089213398,
      "learning_rate": 9.750693082619274e-06,
      "loss": 0.5863,
      "step": 39
    },
    {
      "epoch": 3.8095238095238093,
      "grad_norm": 0.4532435197061412,
      "learning_rate": 9.252699064135759e-06,
      "loss": 0.5153,
      "step": 40
    },
    {
      "epoch": 3.9047619047619047,
      "grad_norm": 0.47188984858284133,
      "learning_rate": 8.756562953525151e-06,
      "loss": 0.5561,
      "step": 41
    },
    {
      "epoch": 4.0,
      "grad_norm": 0.7393169355556092,
      "learning_rate": 8.263518223330698e-06,
      "loss": 0.8115,
      "step": 42
    },
    {
      "epoch": 4.095238095238095,
      "grad_norm": 0.4300138104002246,
      "learning_rate": 7.774790660436857e-06,
      "loss": 0.5274,
      "step": 43
    },
    {
      "epoch": 4.190476190476191,
      "grad_norm": 0.5486304902107627,
      "learning_rate": 7.291595318569951e-06,
      "loss": 0.5042,
      "step": 44
    },
    {
      "epoch": 4.285714285714286,
      "grad_norm": 0.3912659614188877,
      "learning_rate": 6.815133497483157e-06,
      "loss": 0.514,
      "step": 45
    },
    {
      "epoch": 4.380952380952381,
      "grad_norm": 0.43859703727535104,
      "learning_rate": 6.34658975633605e-06,
      "loss": 0.5102,
      "step": 46
    },
    {
      "epoch": 4.476190476190476,
      "grad_norm": 0.4152072796435691,
      "learning_rate": 5.887128968693887e-06,
      "loss": 0.5084,
      "step": 47
    },
    {
      "epoch": 4.571428571428571,
      "grad_norm": 0.378279031747347,
      "learning_rate": 5.43789342646837e-06,
      "loss": 0.5069,
      "step": 48
    },
    {
      "epoch": 4.666666666666667,
      "grad_norm": 0.40105278834779906,
      "learning_rate": 5.000000000000003e-06,
      "loss": 0.4998,
      "step": 49
    },
    {
      "epoch": 4.761904761904762,
      "grad_norm": 0.38485178131265124,
      "learning_rate": 4.5745373613424075e-06,
      "loss": 0.5121,
      "step": 50
    },
    {
      "epoch": 4.857142857142857,
      "grad_norm": 0.3568969456024383,
      "learning_rate": 4.162563277652104e-06,
      "loss": 0.5037,
      "step": 51
    },
    {
      "epoch": 4.9523809523809526,
      "grad_norm": 0.3917108399606805,
      "learning_rate": 3.7651019814126656e-06,
      "loss": 0.5084,
      "step": 52
    },
    {
      "epoch": 5.0476190476190474,
      "grad_norm": 0.5625678926141774,
      "learning_rate": 3.3831416240314085e-06,
      "loss": 0.7461,
      "step": 53
    },
    {
      "epoch": 5.142857142857143,
      "grad_norm": 0.33638019603674757,
      "learning_rate": 3.017631819139273e-06,
      "loss": 0.4874,
      "step": 54
    },
    {
      "epoch": 5.238095238095238,
      "grad_norm": 0.41949833161994055,
      "learning_rate": 2.669481281701739e-06,
      "loss": 0.4867,
      "step": 55
    },
    {
      "epoch": 5.333333333333333,
      "grad_norm": 0.34434070204569706,
      "learning_rate": 2.339555568810221e-06,
      "loss": 0.4773,
      "step": 56
    },
    {
      "epoch": 5.428571428571429,
      "grad_norm": 0.31702096619813974,
      "learning_rate": 2.0286749277707783e-06,
      "loss": 0.4981,
      "step": 57
    },
    {
      "epoch": 5.523809523809524,
      "grad_norm": 0.3100134818597013,
      "learning_rate": 1.7376122568400533e-06,
      "loss": 0.4494,
      "step": 58
    },
    {
      "epoch": 5.619047619047619,
      "grad_norm": 0.6463917980073877,
      "learning_rate": 1.467091183678444e-06,
      "loss": 0.5017,
      "step": 59
    },
    {
      "epoch": 5.714285714285714,
      "grad_norm": 0.3288971269174857,
      "learning_rate": 1.2177842662977136e-06,
      "loss": 0.4787,
      "step": 60
    },
    {
      "epoch": 5.809523809523809,
      "grad_norm": 0.3157943689804271,
      "learning_rate": 9.903113209758098e-07,
      "loss": 0.4815,
      "step": 61
    },
    {
      "epoch": 5.904761904761905,
      "grad_norm": 0.29591155402427793,
      "learning_rate": 7.852378812959227e-07,
      "loss": 0.4704,
      "step": 62
    },
    {
      "epoch": 6.0,
      "grad_norm": 0.4533644761517681,
      "learning_rate": 6.030737921409169e-07,
      "loss": 0.7154,
      "step": 63
    },
    {
      "epoch": 6.095238095238095,
      "grad_norm": 0.27677583362031194,
      "learning_rate": 4.4427194213859216e-07,
      "loss": 0.4838,
      "step": 64
    },
    {
      "epoch": 6.190476190476191,
      "grad_norm": 0.27759681038860284,
      "learning_rate": 3.0922713770922155e-07,
      "loss": 0.4567,
      "step": 65
    },
    {
      "epoch": 6.285714285714286,
      "grad_norm": 0.25534553117450187,
      "learning_rate": 1.9827512151456175e-07,
      "loss": 0.48,
      "step": 66
    },
    {
      "epoch": 6.380952380952381,
      "grad_norm": 0.25810299592380054,
      "learning_rate": 1.1169173774871478e-07,
      "loss": 0.4817,
      "step": 67
    },
    {
      "epoch": 6.476190476190476,
      "grad_norm": 0.28200325930565023,
      "learning_rate": 4.9692246345985905e-08,
      "loss": 0.4647,
      "step": 68
    },
    {
      "epoch": 6.571428571428571,
      "grad_norm": 0.2746241939650419,
      "learning_rate": 1.2430787810776556e-08,
      "loss": 0.4775,
      "step": 69
    },
    {
      "epoch": 6.666666666666667,
      "grad_norm": 0.26473488744066087,
      "learning_rate": 0.0,
      "loss": 0.4753,
      "step": 70
    },
    {
      "epoch": 6.666666666666667,
      "step": 70,
      "total_flos": 2.007763554182103e+17,
      "train_loss": 0.6434890138251441,
      "train_runtime": 3243.4617,
      "train_samples_per_second": 2.158,
      "train_steps_per_second": 0.022
    }
  ],
  "logging_steps": 1,
  "max_steps": 70,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 7,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.007763554182103e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}