{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.984709480122324,
  "eval_steps": 500,
  "global_step": 366,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.08154943934760449,
      "grad_norm": 1.5832443237304688,
      "learning_rate": 2.702702702702703e-05,
      "loss": 1.8972,
      "step": 10
    },
    {
      "epoch": 0.16309887869520898,
      "grad_norm": 1.6229541301727295,
      "learning_rate": 5.405405405405406e-05,
      "loss": 1.7883,
      "step": 20
    },
    {
      "epoch": 0.24464831804281345,
      "grad_norm": 1.407547116279602,
      "learning_rate": 8.108108108108109e-05,
      "loss": 1.3865,
      "step": 30
    },
    {
      "epoch": 0.32619775739041795,
      "grad_norm": 2.573899745941162,
      "learning_rate": 9.999088210158001e-05,
      "loss": 1.3647,
      "step": 40
    },
    {
      "epoch": 0.4077471967380224,
      "grad_norm": 2.391404390335083,
      "learning_rate": 9.967210469256656e-05,
      "loss": 1.2114,
      "step": 50
    },
    {
      "epoch": 0.4892966360856269,
      "grad_norm": 1.054701805114746,
      "learning_rate": 9.890075235781779e-05,
      "loss": 1.2255,
      "step": 60
    },
    {
      "epoch": 0.5708460754332314,
      "grad_norm": 1.9387873411178589,
      "learning_rate": 9.768385308070138e-05,
      "loss": 1.3242,
      "step": 70
    },
    {
      "epoch": 0.6523955147808359,
      "grad_norm": 0.961560070514679,
      "learning_rate": 9.603249433382144e-05,
      "loss": 1.106,
      "step": 80
    },
    {
      "epoch": 0.7339449541284404,
      "grad_norm": 0.783678412437439,
      "learning_rate": 9.396172205829234e-05,
      "loss": 1.1686,
      "step": 90
    },
    {
      "epoch": 0.8154943934760448,
      "grad_norm": 1.3414908647537231,
      "learning_rate": 9.149040357641929e-05,
      "loss": 1.148,
      "step": 100
    },
    {
      "epoch": 0.8970438328236493,
      "grad_norm": 1.628290057182312,
      "learning_rate": 8.864105568682244e-05,
      "loss": 1.3969,
      "step": 110
    },
    {
      "epoch": 0.9785932721712538,
      "grad_norm": 1.0776128768920898,
      "learning_rate": 8.543963950827279e-05,
      "loss": 1.1595,
      "step": 120
    },
    {
      "epoch": 1.0601427115188584,
      "grad_norm": 0.6094745993614197,
      "learning_rate": 8.191532394146865e-05,
      "loss": 1.091,
      "step": 130
    },
    {
      "epoch": 1.1416921508664628,
      "grad_norm": 1.0875447988510132,
      "learning_rate": 7.810021990391164e-05,
      "loss": 1.0718,
      "step": 140
    },
    {
      "epoch": 1.2232415902140672,
      "grad_norm": 0.9140685200691223,
      "learning_rate": 7.402908775933419e-05,
      "loss": 1.0833,
      "step": 150
    },
    {
      "epoch": 1.3047910295616718,
      "grad_norm": 0.7726348638534546,
      "learning_rate": 6.973902060736226e-05,
      "loss": 1.073,
      "step": 160
    },
    {
      "epoch": 1.3863404689092762,
      "grad_norm": 1.4581207036972046,
      "learning_rate": 6.526910631903973e-05,
      "loss": 1.0823,
      "step": 170
    },
    {
      "epoch": 1.4678899082568808,
      "grad_norm": 0.8327229022979736,
      "learning_rate": 6.0660071397493514e-05,
      "loss": 0.9893,
      "step": 180
    },
    {
      "epoch": 1.5494393476044852,
      "grad_norm": 0.6057823300361633,
      "learning_rate": 5.5953909908613114e-05,
      "loss": 0.9235,
      "step": 190
    },
    {
      "epoch": 1.6309887869520896,
      "grad_norm": 0.7681270241737366,
      "learning_rate": 5.119350086265004e-05,
      "loss": 1.012,
      "step": 200
    },
    {
      "epoch": 1.7125382262996942,
      "grad_norm": 1.238531231880188,
      "learning_rate": 4.64222175328687e-05,
      "loss": 1.0746,
      "step": 210
    },
    {
      "epoch": 1.7940876656472988,
      "grad_norm": 1.5333278179168701,
      "learning_rate": 4.1683532270843504e-05,
      "loss": 1.0924,
      "step": 220
    },
    {
      "epoch": 1.8756371049949032,
      "grad_norm": 1.807810664176941,
      "learning_rate": 3.7020620419029094e-05,
      "loss": 1.0545,
      "step": 230
    },
    {
      "epoch": 1.9571865443425076,
      "grad_norm": 1.5522844791412354,
      "learning_rate": 3.2475966929454504e-05,
      "loss": 1.0335,
      "step": 240
    },
    {
      "epoch": 2.038735983690112,
      "grad_norm": 1.2546252012252808,
      "learning_rate": 2.8090979272736662e-05,
      "loss": 0.9929,
      "step": 250
    },
    {
      "epoch": 2.120285423037717,
      "grad_norm": 1.7230066061019897,
      "learning_rate": 2.3905610164295394e-05,
      "loss": 0.9195,
      "step": 260
    },
    {
      "epoch": 2.2018348623853212,
      "grad_norm": 1.0860559940338135,
      "learning_rate": 1.995799354520598e-05,
      "loss": 0.8906,
      "step": 270
    },
    {
      "epoch": 2.2833843017329256,
      "grad_norm": 1.6877241134643555,
      "learning_rate": 1.6284097134357536e-05,
      "loss": 0.8852,
      "step": 280
    },
    {
      "epoch": 2.36493374108053,
      "grad_norm": 0.8254657983779907,
      "learning_rate": 1.2917394717602121e-05,
      "loss": 0.8899,
      "step": 290
    },
    {
      "epoch": 2.4464831804281344,
      "grad_norm": 1.212756633758545,
      "learning_rate": 9.888561159748993e-06,
      "loss": 0.9932,
      "step": 300
    },
    {
      "epoch": 2.528032619775739,
      "grad_norm": 1.147220253944397,
      "learning_rate": 7.225192918226214e-06,
      "loss": 0.8445,
      "step": 310
    },
    {
      "epoch": 2.6095820591233436,
      "grad_norm": 1.090319275856018,
      "learning_rate": 4.951556604879048e-06,
      "loss": 0.8961,
      "step": 320
    },
    {
      "epoch": 2.691131498470948,
      "grad_norm": 0.7078003287315369,
      "learning_rate": 3.0883678868214806e-06,
      "loss": 0.9571,
      "step": 330
    },
    {
      "epoch": 2.7726809378185524,
      "grad_norm": 0.9578835964202881,
      "learning_rate": 1.6526027408301226e-06,
      "loss": 0.9048,
      "step": 340
    },
    {
      "epoch": 2.8542303771661572,
      "grad_norm": 2.023444652557373,
      "learning_rate": 6.573427809888067e-07,
      "loss": 0.8868,
      "step": 350
    },
    {
      "epoch": 2.9357798165137616,
      "grad_norm": 1.268535852432251,
      "learning_rate": 1.1165606884234181e-07,
      "loss": 0.9579,
      "step": 360
    },
    {
      "epoch": 2.984709480122324,
      "step": 366,
      "total_flos": 2.6312102502948864e+16,
      "train_loss": 1.1005799705213537,
      "train_runtime": 684.9766,
      "train_samples_per_second": 4.296,
      "train_steps_per_second": 0.534
    }
  ],
  "logging_steps": 10,
  "max_steps": 366,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": false,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.6312102502948864e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}