{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 6.72,
  "eval_steps": 500,
  "global_step": 70,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.096,
      "grad_norm": 6.991291106423771,
      "learning_rate": 2.8571428571428573e-06,
      "loss": 1.0604,
      "step": 1
    },
    {
      "epoch": 0.192,
      "grad_norm": 6.9404392087473,
      "learning_rate": 5.7142857142857145e-06,
      "loss": 1.0566,
      "step": 2
    },
    {
      "epoch": 0.288,
      "grad_norm": 6.417751052511398,
      "learning_rate": 8.571428571428571e-06,
      "loss": 1.0483,
      "step": 3
    },
    {
      "epoch": 0.384,
      "grad_norm": 3.242138270087566,
      "learning_rate": 1.1428571428571429e-05,
      "loss": 0.9819,
      "step": 4
    },
    {
      "epoch": 0.48,
      "grad_norm": 4.267046592642321,
      "learning_rate": 1.4285714285714287e-05,
      "loss": 0.9495,
      "step": 5
    },
    {
      "epoch": 0.576,
      "grad_norm": 4.449643412615094,
      "learning_rate": 1.7142857142857142e-05,
      "loss": 0.9476,
      "step": 6
    },
    {
      "epoch": 0.672,
      "grad_norm": 4.2543563479887805,
      "learning_rate": 2e-05,
      "loss": 0.8945,
      "step": 7
    },
    {
      "epoch": 0.768,
      "grad_norm": 3.04655925911499,
      "learning_rate": 1.9987569212189224e-05,
      "loss": 0.8817,
      "step": 8
    },
    {
      "epoch": 0.864,
      "grad_norm": 2.6061134673928112,
      "learning_rate": 1.9950307753654016e-05,
      "loss": 0.8523,
      "step": 9
    },
    {
      "epoch": 0.96,
      "grad_norm": 1.8345700681584693,
      "learning_rate": 1.9888308262251286e-05,
      "loss": 0.8179,
      "step": 10
    },
    {
      "epoch": 1.056,
      "grad_norm": 1.6553945729979291,
      "learning_rate": 1.9801724878485438e-05,
      "loss": 0.79,
      "step": 11
    },
    {
      "epoch": 1.152,
      "grad_norm": 1.7435599135273063,
      "learning_rate": 1.969077286229078e-05,
      "loss": 0.7653,
      "step": 12
    },
    {
      "epoch": 1.248,
      "grad_norm": 1.285420005987591,
      "learning_rate": 1.955572805786141e-05,
      "loss": 0.7539,
      "step": 13
    },
    {
      "epoch": 1.3439999999999999,
      "grad_norm": 1.4808992380192267,
      "learning_rate": 1.9396926207859085e-05,
      "loss": 0.7555,
      "step": 14
    },
    {
      "epoch": 1.44,
      "grad_norm": 1.2689474653555437,
      "learning_rate": 1.921476211870408e-05,
      "loss": 0.7299,
      "step": 15
    },
    {
      "epoch": 1.536,
      "grad_norm": 1.3090980059946602,
      "learning_rate": 1.900968867902419e-05,
      "loss": 0.7253,
      "step": 16
    },
    {
      "epoch": 1.6320000000000001,
      "grad_norm": 1.0613800408829623,
      "learning_rate": 1.8782215733702286e-05,
      "loss": 0.7176,
      "step": 17
    },
    {
      "epoch": 1.728,
      "grad_norm": 1.0428487681920615,
      "learning_rate": 1.8532908816321557e-05,
      "loss": 0.7211,
      "step": 18
    },
    {
      "epoch": 1.8239999999999998,
      "grad_norm": 1.009791001357855,
      "learning_rate": 1.826238774315995e-05,
      "loss": 0.6964,
      "step": 19
    },
    {
      "epoch": 1.92,
      "grad_norm": 0.8816529724390634,
      "learning_rate": 1.7971325072229227e-05,
      "loss": 0.7113,
      "step": 20
    },
    {
      "epoch": 2.016,
      "grad_norm": 0.9551087649605005,
      "learning_rate": 1.766044443118978e-05,
      "loss": 0.7026,
      "step": 21
    },
    {
      "epoch": 2.112,
      "grad_norm": 0.7627964904216537,
      "learning_rate": 1.7330518718298263e-05,
      "loss": 0.6638,
      "step": 22
    },
    {
      "epoch": 2.208,
      "grad_norm": 0.848678521181871,
      "learning_rate": 1.698236818086073e-05,
      "loss": 0.6567,
      "step": 23
    },
    {
      "epoch": 2.304,
      "grad_norm": 0.6539475241251977,
      "learning_rate": 1.6616858375968596e-05,
      "loss": 0.6487,
      "step": 24
    },
    {
      "epoch": 2.4,
      "grad_norm": 0.8990612791765852,
      "learning_rate": 1.6234898018587336e-05,
      "loss": 0.6344,
      "step": 25
    },
    {
      "epoch": 2.496,
      "grad_norm": 0.7416213329212227,
      "learning_rate": 1.5837436722347902e-05,
      "loss": 0.6484,
      "step": 26
    },
    {
      "epoch": 2.592,
      "grad_norm": 0.7328908465586557,
      "learning_rate": 1.5425462638657597e-05,
      "loss": 0.6299,
      "step": 27
    },
    {
      "epoch": 2.6879999999999997,
      "grad_norm": 0.7206262580746776,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.6169,
      "step": 28
    },
    {
      "epoch": 2.784,
      "grad_norm": 0.6434789028128752,
      "learning_rate": 1.4562106573531632e-05,
      "loss": 0.6405,
      "step": 29
    },
    {
      "epoch": 2.88,
      "grad_norm": 0.6347294733542639,
      "learning_rate": 1.4112871031306118e-05,
      "loss": 0.6093,
      "step": 30
    },
    {
      "epoch": 2.976,
      "grad_norm": 0.6152630596334819,
      "learning_rate": 1.3653410243663953e-05,
      "loss": 0.6015,
      "step": 31
    },
    {
      "epoch": 3.072,
      "grad_norm": 0.748786730756543,
      "learning_rate": 1.3184866502516846e-05,
      "loss": 0.5864,
      "step": 32
    },
    {
      "epoch": 3.168,
      "grad_norm": 0.48685398125035,
      "learning_rate": 1.2708404681430054e-05,
      "loss": 0.5867,
      "step": 33
    },
    {
      "epoch": 3.2640000000000002,
      "grad_norm": 0.5373564725675727,
      "learning_rate": 1.2225209339563144e-05,
      "loss": 0.5782,
      "step": 34
    },
    {
      "epoch": 3.36,
      "grad_norm": 0.535644151595081,
      "learning_rate": 1.1736481776669307e-05,
      "loss": 0.5728,
      "step": 35
    },
    {
      "epoch": 3.456,
      "grad_norm": 0.44261470044848933,
      "learning_rate": 1.1243437046474854e-05,
      "loss": 0.5304,
      "step": 36
    },
    {
      "epoch": 3.552,
      "grad_norm": 0.48019507781488413,
      "learning_rate": 1.0747300935864245e-05,
      "loss": 0.5532,
      "step": 37
    },
    {
      "epoch": 3.648,
      "grad_norm": 0.43770022949236825,
      "learning_rate": 1.0249306917380731e-05,
      "loss": 0.5714,
      "step": 38
    },
    {
      "epoch": 3.7439999999999998,
      "grad_norm": 0.4910501759018241,
      "learning_rate": 9.750693082619274e-06,
      "loss": 0.5547,
      "step": 39
    },
    {
      "epoch": 3.84,
      "grad_norm": 0.48029309503660694,
      "learning_rate": 9.252699064135759e-06,
      "loss": 0.5452,
      "step": 40
    },
    {
      "epoch": 3.936,
      "grad_norm": 0.46085280409568424,
      "learning_rate": 8.756562953525151e-06,
      "loss": 0.5773,
      "step": 41
    },
    {
      "epoch": 4.032,
      "grad_norm": 0.4369558520317486,
      "learning_rate": 8.263518223330698e-06,
      "loss": 0.5598,
      "step": 42
    },
    {
      "epoch": 4.128,
      "grad_norm": 0.5072993680368153,
      "learning_rate": 7.774790660436857e-06,
      "loss": 0.5164,
      "step": 43
    },
    {
      "epoch": 4.224,
      "grad_norm": 0.3773968253479734,
      "learning_rate": 7.291595318569951e-06,
      "loss": 0.5408,
      "step": 44
    },
    {
      "epoch": 4.32,
      "grad_norm": 0.4481137871258331,
      "learning_rate": 6.815133497483157e-06,
      "loss": 0.4941,
      "step": 45
    },
    {
      "epoch": 4.416,
      "grad_norm": 0.47018406908380456,
      "learning_rate": 6.34658975633605e-06,
      "loss": 0.5284,
      "step": 46
    },
    {
      "epoch": 4.5120000000000005,
      "grad_norm": 0.5171527881680771,
      "learning_rate": 5.887128968693887e-06,
      "loss": 0.5014,
      "step": 47
    },
    {
      "epoch": 4.608,
      "grad_norm": 0.39260580816991114,
      "learning_rate": 5.43789342646837e-06,
      "loss": 0.5,
      "step": 48
    },
    {
      "epoch": 4.704,
      "grad_norm": 0.4112236644270957,
      "learning_rate": 5.000000000000003e-06,
      "loss": 0.4793,
      "step": 49
    },
    {
      "epoch": 4.8,
      "grad_norm": 0.40770767189599294,
      "learning_rate": 4.5745373613424075e-06,
      "loss": 0.5223,
      "step": 50
    },
    {
      "epoch": 4.896,
      "grad_norm": 0.4302454987757317,
      "learning_rate": 4.162563277652104e-06,
      "loss": 0.5474,
      "step": 51
    },
    {
      "epoch": 4.992,
      "grad_norm": 0.3557016706548058,
      "learning_rate": 3.7651019814126656e-06,
      "loss": 0.5191,
      "step": 52
    },
    {
      "epoch": 5.088,
      "grad_norm": 0.4114725591225338,
      "learning_rate": 3.3831416240314085e-06,
      "loss": 0.4818,
      "step": 53
    },
    {
      "epoch": 5.184,
      "grad_norm": 0.3754167163800502,
      "learning_rate": 3.017631819139273e-06,
      "loss": 0.474,
      "step": 54
    },
    {
      "epoch": 5.28,
      "grad_norm": 0.32245438031841833,
      "learning_rate": 2.669481281701739e-06,
      "loss": 0.4917,
      "step": 55
    },
    {
      "epoch": 5.376,
      "grad_norm": 0.34296759997920345,
      "learning_rate": 2.339555568810221e-06,
      "loss": 0.4908,
      "step": 56
    },
    {
      "epoch": 5.4719999999999995,
      "grad_norm": 0.3902739816060275,
      "learning_rate": 2.0286749277707783e-06,
      "loss": 0.4724,
      "step": 57
    },
    {
      "epoch": 5.568,
      "grad_norm": 0.35043080542410515,
      "learning_rate": 1.7376122568400533e-06,
      "loss": 0.4812,
      "step": 58
    },
    {
      "epoch": 5.664,
      "grad_norm": 0.30341222784616684,
      "learning_rate": 1.467091183678444e-06,
      "loss": 0.4915,
      "step": 59
    },
    {
      "epoch": 5.76,
      "grad_norm": 0.32931258071115643,
      "learning_rate": 1.2177842662977136e-06,
      "loss": 0.4941,
      "step": 60
    },
    {
      "epoch": 5.856,
      "grad_norm": 0.334337445206413,
      "learning_rate": 9.903113209758098e-07,
      "loss": 0.4871,
      "step": 61
    },
    {
      "epoch": 5.952,
      "grad_norm": 0.3088070421813325,
      "learning_rate": 7.852378812959227e-07,
      "loss": 0.5032,
      "step": 62
    },
    {
      "epoch": 6.048,
      "grad_norm": 0.3594182587859646,
      "learning_rate": 6.030737921409169e-07,
      "loss": 0.4605,
      "step": 63
    },
    {
      "epoch": 6.144,
      "grad_norm": 0.3603587818182848,
      "learning_rate": 4.4427194213859216e-07,
      "loss": 0.4592,
      "step": 64
    },
    {
      "epoch": 6.24,
      "grad_norm": 0.28400234193835516,
      "learning_rate": 3.0922713770922155e-07,
      "loss": 0.4724,
      "step": 65
    },
    {
      "epoch": 6.336,
      "grad_norm": 0.27318126327227227,
      "learning_rate": 1.9827512151456175e-07,
      "loss": 0.4849,
      "step": 66
    },
    {
      "epoch": 6.432,
      "grad_norm": 0.35424266564159845,
      "learning_rate": 1.1169173774871478e-07,
      "loss": 0.4629,
      "step": 67
    },
    {
      "epoch": 6.5280000000000005,
      "grad_norm": 0.2619950940084347,
      "learning_rate": 4.9692246345985905e-08,
      "loss": 0.4865,
      "step": 68
    },
    {
      "epoch": 6.624,
      "grad_norm": 0.25288925538925616,
      "learning_rate": 1.2430787810776556e-08,
      "loss": 0.4741,
      "step": 69
    },
    {
      "epoch": 6.72,
      "grad_norm": 0.27245162033242015,
      "learning_rate": 0.0,
      "loss": 0.4932,
      "step": 70
    },
    {
      "epoch": 6.72,
      "step": 70,
      "total_flos": 2.46611802945749e+17,
      "train_loss": 0.6276680737733841,
      "train_runtime": 7354.819,
      "train_samples_per_second": 0.952,
      "train_steps_per_second": 0.01
    }
  ],
  "logging_steps": 1,
  "max_steps": 70,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 7,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.46611802945749e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}