{
  "best_metric": 30.6847,
  "best_model_checkpoint": "./zhko_mbartLarge_50p_tokenize_run1/checkpoint-22288",
  "epoch": 8.0,
  "eval_steps": 500,
  "global_step": 22288,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.18,
      "learning_rate": 5e-05,
      "loss": 2.3311,
      "step": 500
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.885257940150542e-05,
      "loss": 1.9394,
      "step": 1000
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.770515880301083e-05,
      "loss": 1.7898,
      "step": 1500
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.655773820451625e-05,
      "loss": 1.7155,
      "step": 2000
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.541031760602166e-05,
      "loss": 1.6487,
      "step": 2500
    },
    {
      "epoch": 1.0,
      "eval_bleu": 26.5985,
      "eval_gen_len": 15.0925,
      "eval_loss": 1.5519005060195923,
      "eval_runtime": 694.9322,
      "eval_samples_per_second": 16.033,
      "eval_steps_per_second": 1.003,
      "step": 2786
    },
    {
      "epoch": 1.08,
      "learning_rate": 4.4262897007527085e-05,
      "loss": 1.5809,
      "step": 3000
    },
    {
      "epoch": 1.26,
      "learning_rate": 4.311547640903249e-05,
      "loss": 1.3607,
      "step": 3500
    },
    {
      "epoch": 1.44,
      "learning_rate": 4.1968055810537915e-05,
      "loss": 1.2685,
      "step": 4000
    },
    {
      "epoch": 1.62,
      "learning_rate": 4.082063521204333e-05,
      "loss": 1.2206,
      "step": 4500
    },
    {
      "epoch": 1.79,
      "learning_rate": 3.9673214613548745e-05,
      "loss": 1.1933,
      "step": 5000
    },
    {
      "epoch": 1.97,
      "learning_rate": 3.852579401505416e-05,
      "loss": 1.1763,
      "step": 5500
    },
    {
      "epoch": 2.0,
      "eval_bleu": 29.1024,
      "eval_gen_len": 14.8538,
      "eval_loss": 1.4910480976104736,
      "eval_runtime": 659.1489,
      "eval_samples_per_second": 16.904,
      "eval_steps_per_second": 1.057,
      "step": 5572
    },
    {
      "epoch": 2.15,
      "learning_rate": 3.7378373416559576e-05,
      "loss": 1.097,
      "step": 6000
    },
    {
      "epoch": 2.33,
      "learning_rate": 3.623095281806499e-05,
      "loss": 0.9458,
      "step": 6500
    },
    {
      "epoch": 2.51,
      "learning_rate": 3.5083532219570406e-05,
      "loss": 0.9024,
      "step": 7000
    },
    {
      "epoch": 2.69,
      "learning_rate": 3.393611162107582e-05,
      "loss": 0.8768,
      "step": 7500
    },
    {
      "epoch": 2.87,
      "learning_rate": 3.278869102258124e-05,
      "loss": 0.8697,
      "step": 8000
    },
    {
      "epoch": 3.0,
      "eval_bleu": 29.5842,
      "eval_gen_len": 14.7611,
      "eval_loss": 1.5510085821151733,
      "eval_runtime": 648.1855,
      "eval_samples_per_second": 17.19,
      "eval_steps_per_second": 1.075,
      "step": 8358
    },
    {
      "epoch": 3.05,
      "learning_rate": 3.164127042408666e-05,
      "loss": 0.845,
      "step": 8500
    },
    {
      "epoch": 3.23,
      "learning_rate": 3.049384982559207e-05,
      "loss": 0.7348,
      "step": 9000
    },
    {
      "epoch": 3.41,
      "learning_rate": 2.934642922709749e-05,
      "loss": 0.6596,
      "step": 9500
    },
    {
      "epoch": 3.59,
      "learning_rate": 2.81990086286029e-05,
      "loss": 0.6347,
      "step": 10000
    },
    {
      "epoch": 3.77,
      "learning_rate": 2.7051588030108316e-05,
      "loss": 0.6254,
      "step": 10500
    },
    {
      "epoch": 3.95,
      "learning_rate": 2.5904167431613734e-05,
      "loss": 0.6221,
      "step": 11000
    },
    {
      "epoch": 4.0,
      "eval_bleu": 29.6959,
      "eval_gen_len": 14.7091,
      "eval_loss": 1.6445263624191284,
      "eval_runtime": 651.4065,
      "eval_samples_per_second": 17.105,
      "eval_steps_per_second": 1.07,
      "step": 11144
    },
    {
      "epoch": 4.13,
      "learning_rate": 2.475674683311915e-05,
      "loss": 0.5811,
      "step": 11500
    },
    {
      "epoch": 4.31,
      "learning_rate": 2.3609326234624564e-05,
      "loss": 0.4843,
      "step": 12000
    },
    {
      "epoch": 4.49,
      "learning_rate": 2.2461905636129983e-05,
      "loss": 0.4545,
      "step": 12500
    },
    {
      "epoch": 4.67,
      "learning_rate": 2.1314485037635395e-05,
      "loss": 0.4409,
      "step": 13000
    },
    {
      "epoch": 4.85,
      "learning_rate": 2.0167064439140813e-05,
      "loss": 0.4444,
      "step": 13500
    },
    {
      "epoch": 5.0,
      "eval_bleu": 29.6231,
      "eval_gen_len": 14.6204,
      "eval_loss": 1.7175551652908325,
      "eval_runtime": 647.6814,
      "eval_samples_per_second": 17.203,
      "eval_steps_per_second": 1.076,
      "step": 13930
    },
    {
      "epoch": 5.03,
      "learning_rate": 1.9019643840646228e-05,
      "loss": 0.4335,
      "step": 14000
    },
    {
      "epoch": 5.2,
      "learning_rate": 1.7872223242151643e-05,
      "loss": 0.3781,
      "step": 14500
    },
    {
      "epoch": 5.38,
      "learning_rate": 1.672480264365706e-05,
      "loss": 0.3238,
      "step": 15000
    },
    {
      "epoch": 5.56,
      "learning_rate": 1.5577382045162474e-05,
      "loss": 0.3104,
      "step": 15500
    },
    {
      "epoch": 5.74,
      "learning_rate": 1.4429961446667892e-05,
      "loss": 0.3117,
      "step": 16000
    },
    {
      "epoch": 5.92,
      "learning_rate": 1.3282540848173307e-05,
      "loss": 0.3137,
      "step": 16500
    },
    {
      "epoch": 6.0,
      "eval_bleu": 29.6666,
      "eval_gen_len": 14.524,
      "eval_loss": 1.7916326522827148,
      "eval_runtime": 642.1683,
      "eval_samples_per_second": 17.351,
      "eval_steps_per_second": 1.085,
      "step": 16716
    },
    {
      "epoch": 6.1,
      "learning_rate": 1.2135120249678723e-05,
      "loss": 0.2987,
      "step": 17000
    },
    {
      "epoch": 6.28,
      "learning_rate": 1.0987699651184138e-05,
      "loss": 0.2428,
      "step": 17500
    },
    {
      "epoch": 6.46,
      "learning_rate": 9.840279052689555e-06,
      "loss": 0.2263,
      "step": 18000
    },
    {
      "epoch": 6.64,
      "learning_rate": 8.69285845419497e-06,
      "loss": 0.2229,
      "step": 18500
    },
    {
      "epoch": 6.82,
      "learning_rate": 7.5454378557003866e-06,
      "loss": 0.2255,
      "step": 19000
    },
    {
      "epoch": 7.0,
      "learning_rate": 6.398017257205802e-06,
      "loss": 0.2303,
      "step": 19500
    },
    {
      "epoch": 7.0,
      "eval_bleu": 30.4697,
      "eval_gen_len": 14.5571,
      "eval_loss": 1.8367947340011597,
      "eval_runtime": 644.2042,
      "eval_samples_per_second": 17.296,
      "eval_steps_per_second": 1.082,
      "step": 19502
    },
    {
      "epoch": 7.18,
      "learning_rate": 5.250596658711218e-06,
      "loss": 0.2054,
      "step": 20000
    },
    {
      "epoch": 7.36,
      "learning_rate": 4.103176060216634e-06,
      "loss": 0.1737,
      "step": 20500
    },
    {
      "epoch": 7.54,
      "learning_rate": 2.9557554617220493e-06,
      "loss": 0.1702,
      "step": 21000
    },
    {
      "epoch": 7.72,
      "learning_rate": 1.8083348632274646e-06,
      "loss": 0.1774,
      "step": 21500
    },
    {
      "epoch": 7.9,
      "learning_rate": 6.609142647328805e-07,
      "loss": 0.1888,
      "step": 22000
    },
    {
      "epoch": 8.0,
      "eval_bleu": 30.6847,
      "eval_gen_len": 14.6895,
      "eval_loss": 1.8482487201690674,
      "eval_runtime": 644.9808,
      "eval_samples_per_second": 17.275,
      "eval_steps_per_second": 1.081,
      "step": 22288
    }
  ],
  "logging_steps": 500,
  "max_steps": 22288,
  "num_train_epochs": 8,
  "save_steps": 500,
  "total_flos": 1.5456316224207585e+18,
  "trial_name": null,
  "trial_params": null
}