{
  "best_metric": 0.9154929577464789,
  "best_model_checkpoint": "deit-base-distilled-patch16-224-65-fold2/checkpoint-172",
  "epoch": 92.3076923076923,
  "eval_steps": 500,
  "global_step": 300,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.9230769230769231,
      "eval_accuracy": 0.43661971830985913,
      "eval_loss": 0.7210970520973206,
      "eval_runtime": 0.9774,
      "eval_samples_per_second": 72.641,
      "eval_steps_per_second": 3.069,
      "step": 3
    },
    {
      "epoch": 1.8461538461538463,
      "eval_accuracy": 0.5070422535211268,
      "eval_loss": 0.7016032934188843,
      "eval_runtime": 0.9676,
      "eval_samples_per_second": 73.379,
      "eval_steps_per_second": 3.101,
      "step": 6
    },
    {
      "epoch": 2.769230769230769,
      "eval_accuracy": 0.6197183098591549,
      "eval_loss": 0.6702623963356018,
      "eval_runtime": 0.9591,
      "eval_samples_per_second": 74.026,
      "eval_steps_per_second": 3.128,
      "step": 9
    },
    {
      "epoch": 3.076923076923077,
      "grad_norm": 4.81842565536499,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.6946,
      "step": 10
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.6619718309859155,
      "eval_loss": 0.6381291151046753,
      "eval_runtime": 0.9682,
      "eval_samples_per_second": 73.329,
      "eval_steps_per_second": 3.098,
      "step": 13
    },
    {
      "epoch": 4.923076923076923,
      "eval_accuracy": 0.676056338028169,
      "eval_loss": 0.594517171382904,
      "eval_runtime": 0.9627,
      "eval_samples_per_second": 73.753,
      "eval_steps_per_second": 3.116,
      "step": 16
    },
    {
      "epoch": 5.846153846153846,
      "eval_accuracy": 0.7183098591549296,
      "eval_loss": 0.6083856225013733,
      "eval_runtime": 0.962,
      "eval_samples_per_second": 73.805,
      "eval_steps_per_second": 3.119,
      "step": 19
    },
    {
      "epoch": 6.153846153846154,
      "grad_norm": 5.711062908172607,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 0.6262,
      "step": 20
    },
    {
      "epoch": 6.769230769230769,
      "eval_accuracy": 0.7464788732394366,
      "eval_loss": 0.5638889670372009,
      "eval_runtime": 0.9689,
      "eval_samples_per_second": 73.276,
      "eval_steps_per_second": 3.096,
      "step": 22
    },
    {
      "epoch": 8.0,
      "eval_accuracy": 0.7746478873239436,
      "eval_loss": 0.5203454494476318,
      "eval_runtime": 0.9744,
      "eval_samples_per_second": 72.864,
      "eval_steps_per_second": 3.079,
      "step": 26
    },
    {
      "epoch": 8.923076923076923,
      "eval_accuracy": 0.7887323943661971,
      "eval_loss": 0.4804993271827698,
      "eval_runtime": 0.9763,
      "eval_samples_per_second": 72.725,
      "eval_steps_per_second": 3.073,
      "step": 29
    },
    {
      "epoch": 9.23076923076923,
      "grad_norm": 3.8290281295776367,
      "learning_rate": 5e-05,
      "loss": 0.544,
      "step": 30
    },
    {
      "epoch": 9.846153846153847,
      "eval_accuracy": 0.7323943661971831,
      "eval_loss": 0.5204476118087769,
      "eval_runtime": 0.9609,
      "eval_samples_per_second": 73.886,
      "eval_steps_per_second": 3.122,
      "step": 32
    },
    {
      "epoch": 10.76923076923077,
      "eval_accuracy": 0.7746478873239436,
      "eval_loss": 0.46348023414611816,
      "eval_runtime": 0.9654,
      "eval_samples_per_second": 73.545,
      "eval_steps_per_second": 3.108,
      "step": 35
    },
    {
      "epoch": 12.0,
      "eval_accuracy": 0.7605633802816901,
      "eval_loss": 0.4957320988178253,
      "eval_runtime": 0.9736,
      "eval_samples_per_second": 72.928,
      "eval_steps_per_second": 3.081,
      "step": 39
    },
    {
      "epoch": 12.307692307692308,
      "grad_norm": 7.22072696685791,
      "learning_rate": 4.814814814814815e-05,
      "loss": 0.516,
      "step": 40
    },
    {
      "epoch": 12.923076923076923,
      "eval_accuracy": 0.7746478873239436,
      "eval_loss": 0.47228991985321045,
      "eval_runtime": 0.9665,
      "eval_samples_per_second": 73.461,
      "eval_steps_per_second": 3.104,
      "step": 42
    },
    {
      "epoch": 13.846153846153847,
      "eval_accuracy": 0.704225352112676,
      "eval_loss": 0.5169680118560791,
      "eval_runtime": 0.9657,
      "eval_samples_per_second": 73.519,
      "eval_steps_per_second": 3.106,
      "step": 45
    },
    {
      "epoch": 14.76923076923077,
      "eval_accuracy": 0.8169014084507042,
      "eval_loss": 0.5404752492904663,
      "eval_runtime": 0.9676,
      "eval_samples_per_second": 73.379,
      "eval_steps_per_second": 3.101,
      "step": 48
    },
    {
      "epoch": 15.384615384615385,
      "grad_norm": 10.899679183959961,
      "learning_rate": 4.62962962962963e-05,
      "loss": 0.4938,
      "step": 50
    },
    {
      "epoch": 16.0,
      "eval_accuracy": 0.7323943661971831,
      "eval_loss": 0.5082057118415833,
      "eval_runtime": 1.0054,
      "eval_samples_per_second": 70.619,
      "eval_steps_per_second": 2.984,
      "step": 52
    },
    {
      "epoch": 16.923076923076923,
      "eval_accuracy": 0.7887323943661971,
      "eval_loss": 0.4608191251754761,
      "eval_runtime": 0.9655,
      "eval_samples_per_second": 73.539,
      "eval_steps_per_second": 3.107,
      "step": 55
    },
    {
      "epoch": 17.846153846153847,
      "eval_accuracy": 0.7605633802816901,
      "eval_loss": 0.4211063086986542,
      "eval_runtime": 0.9818,
      "eval_samples_per_second": 72.313,
      "eval_steps_per_second": 3.055,
      "step": 58
    },
    {
      "epoch": 18.46153846153846,
      "grad_norm": 5.350540637969971,
      "learning_rate": 4.4444444444444447e-05,
      "loss": 0.4123,
      "step": 60
    },
    {
      "epoch": 18.76923076923077,
      "eval_accuracy": 0.7746478873239436,
      "eval_loss": 0.5014901757240295,
      "eval_runtime": 0.9712,
      "eval_samples_per_second": 73.105,
      "eval_steps_per_second": 3.089,
      "step": 61
    },
    {
      "epoch": 20.0,
      "eval_accuracy": 0.8591549295774648,
      "eval_loss": 0.3935275375843048,
      "eval_runtime": 0.9719,
      "eval_samples_per_second": 73.054,
      "eval_steps_per_second": 3.087,
      "step": 65
    },
    {
      "epoch": 20.923076923076923,
      "eval_accuracy": 0.8169014084507042,
      "eval_loss": 0.4178997874259949,
      "eval_runtime": 0.9729,
      "eval_samples_per_second": 72.978,
      "eval_steps_per_second": 3.084,
      "step": 68
    },
    {
      "epoch": 21.53846153846154,
      "grad_norm": 4.967121124267578,
      "learning_rate": 4.259259259259259e-05,
      "loss": 0.3489,
      "step": 70
    },
    {
      "epoch": 21.846153846153847,
      "eval_accuracy": 0.9014084507042254,
      "eval_loss": 0.3991130590438843,
      "eval_runtime": 0.9718,
      "eval_samples_per_second": 73.064,
      "eval_steps_per_second": 3.087,
      "step": 71
    },
    {
      "epoch": 22.76923076923077,
      "eval_accuracy": 0.8591549295774648,
      "eval_loss": 0.39102521538734436,
      "eval_runtime": 0.9741,
      "eval_samples_per_second": 72.89,
      "eval_steps_per_second": 3.08,
      "step": 74
    },
    {
      "epoch": 24.0,
      "eval_accuracy": 0.8309859154929577,
      "eval_loss": 0.4276868402957916,
      "eval_runtime": 0.9687,
      "eval_samples_per_second": 73.295,
      "eval_steps_per_second": 3.097,
      "step": 78
    },
    {
      "epoch": 24.615384615384617,
      "grad_norm": 6.507815837860107,
      "learning_rate": 4.074074074074074e-05,
      "loss": 0.2889,
      "step": 80
    },
    {
      "epoch": 24.923076923076923,
      "eval_accuracy": 0.8169014084507042,
      "eval_loss": 0.40316298604011536,
      "eval_runtime": 0.9728,
      "eval_samples_per_second": 72.986,
      "eval_steps_per_second": 3.084,
      "step": 81
    },
    {
      "epoch": 25.846153846153847,
      "eval_accuracy": 0.8591549295774648,
      "eval_loss": 0.37030014395713806,
      "eval_runtime": 0.9651,
      "eval_samples_per_second": 73.566,
      "eval_steps_per_second": 3.108,
      "step": 84
    },
    {
      "epoch": 26.76923076923077,
      "eval_accuracy": 0.8309859154929577,
      "eval_loss": 0.44038423895835876,
      "eval_runtime": 0.9745,
      "eval_samples_per_second": 72.856,
      "eval_steps_per_second": 3.078,
      "step": 87
    },
    {
      "epoch": 27.692307692307693,
      "grad_norm": 4.317188739776611,
      "learning_rate": 3.888888888888889e-05,
      "loss": 0.2659,
      "step": 90
    },
    {
      "epoch": 28.0,
      "eval_accuracy": 0.8591549295774648,
      "eval_loss": 0.3666124641895294,
      "eval_runtime": 0.9751,
      "eval_samples_per_second": 72.814,
      "eval_steps_per_second": 3.077,
      "step": 91
    },
    {
      "epoch": 28.923076923076923,
      "eval_accuracy": 0.8169014084507042,
      "eval_loss": 0.39921075105667114,
      "eval_runtime": 0.9713,
      "eval_samples_per_second": 73.095,
      "eval_steps_per_second": 3.089,
      "step": 94
    },
    {
      "epoch": 29.846153846153847,
      "eval_accuracy": 0.8169014084507042,
      "eval_loss": 0.4040003716945648,
      "eval_runtime": 0.9702,
      "eval_samples_per_second": 73.183,
      "eval_steps_per_second": 3.092,
      "step": 97
    },
    {
      "epoch": 30.76923076923077,
      "grad_norm": 2.3390183448791504,
      "learning_rate": 3.7037037037037037e-05,
      "loss": 0.2269,
      "step": 100
    },
    {
      "epoch": 30.76923076923077,
      "eval_accuracy": 0.8591549295774648,
      "eval_loss": 0.355948269367218,
      "eval_runtime": 0.9741,
      "eval_samples_per_second": 72.89,
      "eval_steps_per_second": 3.08,
      "step": 100
    },
    {
      "epoch": 32.0,
      "eval_accuracy": 0.8028169014084507,
      "eval_loss": 0.4765550196170807,
      "eval_runtime": 0.9724,
      "eval_samples_per_second": 73.018,
      "eval_steps_per_second": 3.085,
      "step": 104
    },
    {
      "epoch": 32.92307692307692,
      "eval_accuracy": 0.8591549295774648,
      "eval_loss": 0.3851951062679291,
      "eval_runtime": 0.965,
      "eval_samples_per_second": 73.578,
      "eval_steps_per_second": 3.109,
      "step": 107
    },
    {
      "epoch": 33.84615384615385,
      "grad_norm": 4.255270957946777,
      "learning_rate": 3.518518518518519e-05,
      "loss": 0.2031,
      "step": 110
    },
    {
      "epoch": 33.84615384615385,
      "eval_accuracy": 0.8591549295774648,
      "eval_loss": 0.37017911672592163,
      "eval_runtime": 0.975,
      "eval_samples_per_second": 72.818,
      "eval_steps_per_second": 3.077,
      "step": 110
    },
    {
      "epoch": 34.76923076923077,
      "eval_accuracy": 0.8732394366197183,
      "eval_loss": 0.32030993700027466,
      "eval_runtime": 0.972,
      "eval_samples_per_second": 73.048,
      "eval_steps_per_second": 3.087,
      "step": 113
    },
    {
      "epoch": 36.0,
      "eval_accuracy": 0.8169014084507042,
      "eval_loss": 0.5303316116333008,
      "eval_runtime": 0.9701,
      "eval_samples_per_second": 73.19,
      "eval_steps_per_second": 3.093,
      "step": 117
    },
    {
      "epoch": 36.92307692307692,
      "grad_norm": 4.1614155769348145,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 0.2037,
      "step": 120
    },
    {
      "epoch": 36.92307692307692,
      "eval_accuracy": 0.8732394366197183,
      "eval_loss": 0.3896830081939697,
      "eval_runtime": 0.9797,
      "eval_samples_per_second": 72.472,
      "eval_steps_per_second": 3.062,
      "step": 120
    },
    {
      "epoch": 37.84615384615385,
      "eval_accuracy": 0.8732394366197183,
      "eval_loss": 0.3841053247451782,
      "eval_runtime": 0.9701,
      "eval_samples_per_second": 73.191,
      "eval_steps_per_second": 3.093,
      "step": 123
    },
    {
      "epoch": 38.76923076923077,
      "eval_accuracy": 0.8873239436619719,
      "eval_loss": 0.38963139057159424,
      "eval_runtime": 0.9753,
      "eval_samples_per_second": 72.802,
      "eval_steps_per_second": 3.076,
      "step": 126
    },
    {
      "epoch": 40.0,
      "grad_norm": 3.8179051876068115,
      "learning_rate": 3.148148148148148e-05,
      "loss": 0.2018,
      "step": 130
    },
    {
      "epoch": 40.0,
      "eval_accuracy": 0.8450704225352113,
      "eval_loss": 0.4176679849624634,
      "eval_runtime": 0.9825,
      "eval_samples_per_second": 72.267,
      "eval_steps_per_second": 3.054,
      "step": 130
    },
    {
      "epoch": 40.92307692307692,
      "eval_accuracy": 0.8450704225352113,
      "eval_loss": 0.4547717273235321,
      "eval_runtime": 0.9725,
      "eval_samples_per_second": 73.008,
      "eval_steps_per_second": 3.085,
      "step": 133
    },
    {
      "epoch": 41.84615384615385,
      "eval_accuracy": 0.8309859154929577,
      "eval_loss": 0.4115450382232666,
      "eval_runtime": 0.9703,
      "eval_samples_per_second": 73.173,
      "eval_steps_per_second": 3.092,
      "step": 136
    },
    {
      "epoch": 42.76923076923077,
      "eval_accuracy": 0.8450704225352113,
      "eval_loss": 0.4120871424674988,
      "eval_runtime": 0.9779,
      "eval_samples_per_second": 72.604,
      "eval_steps_per_second": 3.068,
      "step": 139
    },
    {
      "epoch": 43.07692307692308,
      "grad_norm": 6.913601875305176,
      "learning_rate": 2.962962962962963e-05,
      "loss": 0.1721,
      "step": 140
    },
    {
      "epoch": 44.0,
      "eval_accuracy": 0.8591549295774648,
      "eval_loss": 0.3919922411441803,
      "eval_runtime": 0.9673,
      "eval_samples_per_second": 73.403,
      "eval_steps_per_second": 3.102,
      "step": 143
    },
    {
      "epoch": 44.92307692307692,
      "eval_accuracy": 0.8450704225352113,
      "eval_loss": 0.36926642060279846,
      "eval_runtime": 0.9653,
      "eval_samples_per_second": 73.552,
      "eval_steps_per_second": 3.108,
      "step": 146
    },
    {
      "epoch": 45.84615384615385,
      "eval_accuracy": 0.8591549295774648,
      "eval_loss": 0.36045223474502563,
      "eval_runtime": 0.957,
      "eval_samples_per_second": 74.188,
      "eval_steps_per_second": 3.135,
      "step": 149
    },
    {
      "epoch": 46.15384615384615,
      "grad_norm": 4.530900001525879,
      "learning_rate": 2.777777777777778e-05,
      "loss": 0.1678,
      "step": 150
    },
    {
      "epoch": 46.76923076923077,
      "eval_accuracy": 0.8309859154929577,
      "eval_loss": 0.54340660572052,
      "eval_runtime": 0.9636,
      "eval_samples_per_second": 73.681,
      "eval_steps_per_second": 3.113,
      "step": 152
    },
    {
      "epoch": 48.0,
      "eval_accuracy": 0.8309859154929577,
      "eval_loss": 0.41889968514442444,
      "eval_runtime": 0.9712,
      "eval_samples_per_second": 73.103,
      "eval_steps_per_second": 3.089,
      "step": 156
    },
    {
      "epoch": 48.92307692307692,
      "eval_accuracy": 0.8873239436619719,
      "eval_loss": 0.31237733364105225,
      "eval_runtime": 0.9625,
      "eval_samples_per_second": 73.765,
      "eval_steps_per_second": 3.117,
      "step": 159
    },
    {
      "epoch": 49.23076923076923,
      "grad_norm": 3.286679744720459,
      "learning_rate": 2.5925925925925925e-05,
      "loss": 0.1604,
      "step": 160
    },
    {
      "epoch": 49.84615384615385,
      "eval_accuracy": 0.8873239436619719,
      "eval_loss": 0.32932186126708984,
      "eval_runtime": 0.9716,
      "eval_samples_per_second": 73.073,
      "eval_steps_per_second": 3.088,
      "step": 162
    },
    {
      "epoch": 50.76923076923077,
      "eval_accuracy": 0.9014084507042254,
      "eval_loss": 0.3371872901916504,
      "eval_runtime": 0.9747,
      "eval_samples_per_second": 72.843,
      "eval_steps_per_second": 3.078,
      "step": 165
    },
    {
      "epoch": 52.0,
      "eval_accuracy": 0.8732394366197183,
      "eval_loss": 0.35047727823257446,
      "eval_runtime": 0.9734,
      "eval_samples_per_second": 72.943,
      "eval_steps_per_second": 3.082,
      "step": 169
    },
    {
      "epoch": 52.30769230769231,
      "grad_norm": 3.787102460861206,
      "learning_rate": 2.4074074074074074e-05,
      "loss": 0.1406,
      "step": 170
    },
    {
      "epoch": 52.92307692307692,
      "eval_accuracy": 0.9154929577464789,
      "eval_loss": 0.3094751834869385,
      "eval_runtime": 0.9675,
      "eval_samples_per_second": 73.386,
      "eval_steps_per_second": 3.101,
      "step": 172
    },
    {
      "epoch": 53.84615384615385,
      "eval_accuracy": 0.9154929577464789,
      "eval_loss": 0.3054424822330475,
      "eval_runtime": 0.9688,
      "eval_samples_per_second": 73.284,
      "eval_steps_per_second": 3.097,
      "step": 175
    },
    {
      "epoch": 54.76923076923077,
      "eval_accuracy": 0.8873239436619719,
      "eval_loss": 0.36954671144485474,
      "eval_runtime": 0.9714,
      "eval_samples_per_second": 73.089,
      "eval_steps_per_second": 3.088,
      "step": 178
    },
    {
      "epoch": 55.38461538461539,
      "grad_norm": 2.4381654262542725,
      "learning_rate": 2.2222222222222223e-05,
      "loss": 0.1492,
      "step": 180
    },
    {
      "epoch": 56.0,
      "eval_accuracy": 0.8591549295774648,
      "eval_loss": 0.4058314263820648,
      "eval_runtime": 0.972,
      "eval_samples_per_second": 73.043,
      "eval_steps_per_second": 3.086,
      "step": 182
    },
    {
      "epoch": 56.92307692307692,
      "eval_accuracy": 0.8450704225352113,
      "eval_loss": 0.4649871587753296,
      "eval_runtime": 0.9843,
      "eval_samples_per_second": 72.131,
      "eval_steps_per_second": 3.048,
      "step": 185
    },
    {
      "epoch": 57.84615384615385,
      "eval_accuracy": 0.8591549295774648,
      "eval_loss": 0.4060044288635254,
      "eval_runtime": 0.9744,
      "eval_samples_per_second": 72.864,
      "eval_steps_per_second": 3.079,
      "step": 188
    },
    {
      "epoch": 58.46153846153846,
      "grad_norm": 2.546844244003296,
      "learning_rate": 2.037037037037037e-05,
      "loss": 0.1359,
      "step": 190
    },
    {
      "epoch": 58.76923076923077,
      "eval_accuracy": 0.8873239436619719,
      "eval_loss": 0.3819490671157837,
      "eval_runtime": 0.9781,
      "eval_samples_per_second": 72.586,
      "eval_steps_per_second": 3.067,
      "step": 191
    },
    {
      "epoch": 60.0,
      "eval_accuracy": 0.7887323943661971,
      "eval_loss": 0.5229614973068237,
      "eval_runtime": 0.9718,
      "eval_samples_per_second": 73.063,
      "eval_steps_per_second": 3.087,
      "step": 195
    },
    {
      "epoch": 60.92307692307692,
      "eval_accuracy": 0.8169014084507042,
      "eval_loss": 0.4986177980899811,
      "eval_runtime": 0.9746,
      "eval_samples_per_second": 72.849,
      "eval_steps_per_second": 3.078,
      "step": 198
    },
    {
      "epoch": 61.53846153846154,
      "grad_norm": 2.6978113651275635,
      "learning_rate": 1.8518518518518518e-05,
      "loss": 0.1264,
      "step": 200
    },
    {
      "epoch": 61.84615384615385,
      "eval_accuracy": 0.8309859154929577,
      "eval_loss": 0.4570479691028595,
      "eval_runtime": 0.9683,
      "eval_samples_per_second": 73.323,
      "eval_steps_per_second": 3.098,
      "step": 201
    },
    {
      "epoch": 62.76923076923077,
      "eval_accuracy": 0.8450704225352113,
      "eval_loss": 0.45072540640830994,
      "eval_runtime": 0.9768,
      "eval_samples_per_second": 72.685,
      "eval_steps_per_second": 3.071,
      "step": 204
    },
    {
      "epoch": 64.0,
      "eval_accuracy": 0.8028169014084507,
      "eval_loss": 0.5765404105186462,
      "eval_runtime": 0.976,
      "eval_samples_per_second": 72.748,
      "eval_steps_per_second": 3.074,
      "step": 208
    },
    {
      "epoch": 64.61538461538461,
      "grad_norm": 3.2018659114837646,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.1478,
      "step": 210
    },
    {
      "epoch": 64.92307692307692,
      "eval_accuracy": 0.8591549295774648,
      "eval_loss": 0.45142102241516113,
      "eval_runtime": 0.9733,
      "eval_samples_per_second": 72.945,
      "eval_steps_per_second": 3.082,
      "step": 211
    },
    {
      "epoch": 65.84615384615384,
      "eval_accuracy": 0.8873239436619719,
      "eval_loss": 0.44336047768592834,
      "eval_runtime": 0.974,
      "eval_samples_per_second": 72.897,
      "eval_steps_per_second": 3.08,
      "step": 214
    },
    {
      "epoch": 66.76923076923077,
      "eval_accuracy": 0.8591549295774648,
      "eval_loss": 0.44032683968544006,
      "eval_runtime": 0.9712,
      "eval_samples_per_second": 73.106,
      "eval_steps_per_second": 3.089,
      "step": 217
    },
    {
      "epoch": 67.6923076923077,
      "grad_norm": 3.408703327178955,
      "learning_rate": 1.4814814814814815e-05,
      "loss": 0.1398,
      "step": 220
    },
    {
      "epoch": 68.0,
      "eval_accuracy": 0.8309859154929577,
      "eval_loss": 0.5928447842597961,
      "eval_runtime": 0.9753,
      "eval_samples_per_second": 72.795,
      "eval_steps_per_second": 3.076,
      "step": 221
    },
    {
      "epoch": 68.92307692307692,
      "eval_accuracy": 0.8591549295774648,
      "eval_loss": 0.45865926146507263,
      "eval_runtime": 0.9755,
      "eval_samples_per_second": 72.781,
      "eval_steps_per_second": 3.075,
      "step": 224
    },
    {
      "epoch": 69.84615384615384,
      "eval_accuracy": 0.8450704225352113,
      "eval_loss": 0.4053456783294678,
      "eval_runtime": 0.9772,
      "eval_samples_per_second": 72.658,
      "eval_steps_per_second": 3.07,
      "step": 227
    },
    {
      "epoch": 70.76923076923077,
      "grad_norm": 2.2398223876953125,
      "learning_rate": 1.2962962962962962e-05,
      "loss": 0.161,
      "step": 230
    },
    {
      "epoch": 70.76923076923077,
      "eval_accuracy": 0.8591549295774648,
      "eval_loss": 0.4232625663280487,
      "eval_runtime": 0.9751,
      "eval_samples_per_second": 72.817,
      "eval_steps_per_second": 3.077,
      "step": 230
    },
    {
      "epoch": 72.0,
      "eval_accuracy": 0.8591549295774648,
      "eval_loss": 0.4264310300350189,
      "eval_runtime": 0.9733,
      "eval_samples_per_second": 72.951,
      "eval_steps_per_second": 3.082,
      "step": 234
    },
    {
      "epoch": 72.92307692307692,
      "eval_accuracy": 0.8309859154929577,
      "eval_loss": 0.4126971662044525,
      "eval_runtime": 0.9786,
      "eval_samples_per_second": 72.549,
      "eval_steps_per_second": 3.065,
      "step": 237
    },
    {
      "epoch": 73.84615384615384,
      "grad_norm": 2.928635597229004,
      "learning_rate": 1.1111111111111112e-05,
      "loss": 0.1326,
      "step": 240
    },
    {
      "epoch": 73.84615384615384,
      "eval_accuracy": 0.8591549295774648,
      "eval_loss": 0.40130165219306946,
      "eval_runtime": 0.9713,
      "eval_samples_per_second": 73.095,
      "eval_steps_per_second": 3.089,
      "step": 240
    },
    {
      "epoch": 74.76923076923077,
      "eval_accuracy": 0.8450704225352113,
      "eval_loss": 0.4389221966266632,
      "eval_runtime": 0.9794,
      "eval_samples_per_second": 72.493,
      "eval_steps_per_second": 3.063,
      "step": 243
    },
    {
      "epoch": 76.0,
      "eval_accuracy": 0.8591549295774648,
      "eval_loss": 0.3771601915359497,
      "eval_runtime": 1.0048,
      "eval_samples_per_second": 70.66,
      "eval_steps_per_second": 2.986,
      "step": 247
    },
    {
      "epoch": 76.92307692307692,
      "grad_norm": 3.744046926498413,
      "learning_rate": 9.259259259259259e-06,
      "loss": 0.1236,
      "step": 250
    },
    {
      "epoch": 76.92307692307692,
      "eval_accuracy": 0.8732394366197183,
      "eval_loss": 0.3600108325481415,
      "eval_runtime": 0.9714,
      "eval_samples_per_second": 73.091,
      "eval_steps_per_second": 3.088,
      "step": 250
    },
    {
      "epoch": 77.84615384615384,
      "eval_accuracy": 0.8873239436619719,
      "eval_loss": 0.3890267312526703,
      "eval_runtime": 0.9697,
      "eval_samples_per_second": 73.219,
      "eval_steps_per_second": 3.094,
      "step": 253
    },
    {
      "epoch": 78.76923076923077,
      "eval_accuracy": 0.8450704225352113,
      "eval_loss": 0.4401465356349945,
      "eval_runtime": 0.9933,
      "eval_samples_per_second": 71.478,
      "eval_steps_per_second": 3.02,
      "step": 256
    },
    {
      "epoch": 80.0,
      "grad_norm": 2.131753444671631,
      "learning_rate": 7.4074074074074075e-06,
      "loss": 0.0973,
      "step": 260
    },
    {
      "epoch": 80.0,
      "eval_accuracy": 0.8591549295774648,
      "eval_loss": 0.40144455432891846,
      "eval_runtime": 0.9626,
      "eval_samples_per_second": 73.756,
      "eval_steps_per_second": 3.116,
      "step": 260
    },
    {
      "epoch": 80.92307692307692,
      "eval_accuracy": 0.8732394366197183,
      "eval_loss": 0.3766299784183502,
      "eval_runtime": 0.9827,
      "eval_samples_per_second": 72.248,
      "eval_steps_per_second": 3.053,
      "step": 263
    },
    {
      "epoch": 81.84615384615384,
      "eval_accuracy": 0.8450704225352113,
      "eval_loss": 0.3907908499240875,
      "eval_runtime": 0.9713,
      "eval_samples_per_second": 73.096,
      "eval_steps_per_second": 3.089,
      "step": 266
    },
    {
      "epoch": 82.76923076923077,
      "eval_accuracy": 0.8591549295774648,
      "eval_loss": 0.43390902876853943,
      "eval_runtime": 0.9782,
      "eval_samples_per_second": 72.584,
      "eval_steps_per_second": 3.067,
      "step": 269
    },
    {
      "epoch": 83.07692307692308,
      "grad_norm": 1.7664235830307007,
      "learning_rate": 5.555555555555556e-06,
      "loss": 0.1079,
      "step": 270
    },
    {
      "epoch": 84.0,
      "eval_accuracy": 0.8591549295774648,
      "eval_loss": 0.4567141532897949,
      "eval_runtime": 0.972,
      "eval_samples_per_second": 73.048,
      "eval_steps_per_second": 3.087,
      "step": 273
    },
    {
      "epoch": 84.92307692307692,
      "eval_accuracy": 0.8732394366197183,
      "eval_loss": 0.44145017862319946,
      "eval_runtime": 0.9751,
      "eval_samples_per_second": 72.813,
      "eval_steps_per_second": 3.077,
      "step": 276
    },
    {
      "epoch": 85.84615384615384,
      "eval_accuracy": 0.8591549295774648,
      "eval_loss": 0.41829437017440796,
      "eval_runtime": 0.9736,
      "eval_samples_per_second": 72.929,
      "eval_steps_per_second": 3.081,
      "step": 279
    },
    {
      "epoch": 86.15384615384616,
      "grad_norm": 3.2624311447143555,
      "learning_rate": 3.7037037037037037e-06,
      "loss": 0.1015,
      "step": 280
    },
    {
      "epoch": 86.76923076923077,
      "eval_accuracy": 0.8873239436619719,
      "eval_loss": 0.4039430618286133,
      "eval_runtime": 0.9659,
      "eval_samples_per_second": 73.504,
      "eval_steps_per_second": 3.106,
      "step": 282
    },
    {
      "epoch": 88.0,
      "eval_accuracy": 0.8873239436619719,
      "eval_loss": 0.3996119201183319,
      "eval_runtime": 0.9685,
      "eval_samples_per_second": 73.307,
      "eval_steps_per_second": 3.097,
      "step": 286
    },
    {
      "epoch": 88.92307692307692,
      "eval_accuracy": 0.9014084507042254,
      "eval_loss": 0.403146356344223,
      "eval_runtime": 0.9729,
      "eval_samples_per_second": 72.978,
      "eval_steps_per_second": 3.084,
      "step": 289
    },
    {
      "epoch": 89.23076923076923,
      "grad_norm": 3.2965073585510254,
      "learning_rate": 1.8518518518518519e-06,
      "loss": 0.1174,
      "step": 290
    },
    {
      "epoch": 89.84615384615384,
      "eval_accuracy": 0.8732394366197183,
      "eval_loss": 0.41012877225875854,
      "eval_runtime": 0.9657,
      "eval_samples_per_second": 73.522,
      "eval_steps_per_second": 3.107,
      "step": 292
    },
    {
      "epoch": 90.76923076923077,
      "eval_accuracy": 0.8732394366197183,
      "eval_loss": 0.41530841588974,
      "eval_runtime": 0.9728,
      "eval_samples_per_second": 72.988,
      "eval_steps_per_second": 3.084,
      "step": 295
    },
    {
      "epoch": 92.0,
      "eval_accuracy": 0.8732394366197183,
      "eval_loss": 0.41457247734069824,
      "eval_runtime": 0.9792,
      "eval_samples_per_second": 72.506,
      "eval_steps_per_second": 3.064,
      "step": 299
    },
    {
      "epoch": 92.3076923076923,
      "grad_norm": 1.616357445716858,
      "learning_rate": 0.0,
      "loss": 0.0968,
      "step": 300
    },
    {
      "epoch": 92.3076923076923,
      "eval_accuracy": 0.8732394366197183,
      "eval_loss": 0.4145110845565796,
      "eval_runtime": 0.9776,
      "eval_samples_per_second": 72.625,
      "eval_steps_per_second": 3.069,
      "step": 300
    },
    {
      "epoch": 92.3076923076923,
      "step": 300,
      "total_flos": 2.8402872494292173e+18,
      "train_loss": 0.24346253156661987,
      "train_runtime": 1610.5192,
      "train_samples_per_second": 24.65,
      "train_steps_per_second": 0.186
    },
    {
      "epoch": 92.3076923076923,
      "eval_accuracy": 0.9154929577464789,
      "eval_loss": 0.3094751834869385,
      "eval_runtime": 1.0166,
      "eval_samples_per_second": 69.839,
      "eval_steps_per_second": 2.951,
      "step": 300
    }
  ],
  "logging_steps": 10,
  "max_steps": 300,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 100,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.8402872494292173e+18,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}