{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.5735918320523116,
  "eval_steps": 10,
  "global_step": 5000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.001147183664104623,
      "eval_loss": 1.873344898223877,
      "eval_runtime": 12.7647,
      "eval_samples_per_second": 515.797,
      "eval_steps_per_second": 8.069,
      "step": 10
    },
    {
      "epoch": 0.002294367328209246,
      "eval_loss": 1.8726389408111572,
      "eval_runtime": 12.8667,
      "eval_samples_per_second": 511.709,
      "eval_steps_per_second": 8.005,
      "step": 20
    },
    {
      "epoch": 0.0034415509923138693,
      "eval_loss": 1.8714078664779663,
      "eval_runtime": 12.9103,
      "eval_samples_per_second": 509.979,
      "eval_steps_per_second": 7.978,
      "step": 30
    },
    {
      "epoch": 0.004588734656418492,
      "eval_loss": 1.8696790933609009,
      "eval_runtime": 12.947,
      "eval_samples_per_second": 508.534,
      "eval_steps_per_second": 7.955,
      "step": 40
    },
    {
      "epoch": 0.0057359183205231154,
      "eval_loss": 1.8675329685211182,
      "eval_runtime": 12.9458,
      "eval_samples_per_second": 508.582,
      "eval_steps_per_second": 7.956,
      "step": 50
    },
    {
      "epoch": 0.006883101984627739,
      "eval_loss": 1.8649154901504517,
      "eval_runtime": 13.0432,
      "eval_samples_per_second": 504.785,
      "eval_steps_per_second": 7.897,
      "step": 60
    },
    {
      "epoch": 0.008030285648732363,
      "eval_loss": 1.8619294166564941,
      "eval_runtime": 13.0638,
      "eval_samples_per_second": 503.988,
      "eval_steps_per_second": 7.884,
      "step": 70
    },
    {
      "epoch": 0.009177469312836984,
      "eval_loss": 1.8583979606628418,
      "eval_runtime": 13.0482,
      "eval_samples_per_second": 504.592,
      "eval_steps_per_second": 7.894,
      "step": 80
    },
    {
      "epoch": 0.010324652976941608,
      "eval_loss": 1.85438871383667,
      "eval_runtime": 13.0615,
      "eval_samples_per_second": 504.075,
      "eval_steps_per_second": 7.886,
      "step": 90
    },
    {
      "epoch": 0.011471836641046231,
      "grad_norm": 9.938580513000488,
      "learning_rate": 3.8226299694189603e-07,
      "loss": 3.1046,
      "step": 100
    },
    {
      "epoch": 0.011471836641046231,
      "eval_loss": 1.849947214126587,
      "eval_runtime": 13.0663,
      "eval_samples_per_second": 503.89,
      "eval_steps_per_second": 7.883,
      "step": 100
    },
    {
      "epoch": 0.012619020305150854,
      "eval_loss": 1.8451412916183472,
      "eval_runtime": 12.9771,
      "eval_samples_per_second": 507.357,
      "eval_steps_per_second": 7.937,
      "step": 110
    },
    {
      "epoch": 0.013766203969255477,
      "eval_loss": 1.8399487733840942,
      "eval_runtime": 13.0209,
      "eval_samples_per_second": 505.648,
      "eval_steps_per_second": 7.91,
      "step": 120
    },
    {
      "epoch": 0.0149133876333601,
      "eval_loss": 1.8342881202697754,
      "eval_runtime": 13.0369,
      "eval_samples_per_second": 505.028,
      "eval_steps_per_second": 7.901,
      "step": 130
    },
    {
      "epoch": 0.016060571297464726,
      "eval_loss": 1.8283486366271973,
      "eval_runtime": 13.0149,
      "eval_samples_per_second": 505.88,
      "eval_steps_per_second": 7.914,
      "step": 140
    },
    {
      "epoch": 0.017207754961569347,
      "eval_loss": 1.822334885597229,
      "eval_runtime": 13.0213,
      "eval_samples_per_second": 505.632,
      "eval_steps_per_second": 7.91,
      "step": 150
    },
    {
      "epoch": 0.01835493862567397,
      "eval_loss": 1.8158738613128662,
      "eval_runtime": 13.0599,
      "eval_samples_per_second": 504.14,
      "eval_steps_per_second": 7.887,
      "step": 160
    },
    {
      "epoch": 0.019502122289778594,
      "eval_loss": 1.8090614080429077,
      "eval_runtime": 13.034,
      "eval_samples_per_second": 505.14,
      "eval_steps_per_second": 7.902,
      "step": 170
    },
    {
      "epoch": 0.020649305953883215,
      "eval_loss": 1.8015782833099365,
      "eval_runtime": 13.0665,
      "eval_samples_per_second": 503.885,
      "eval_steps_per_second": 7.883,
      "step": 180
    },
    {
      "epoch": 0.02179648961798784,
      "eval_loss": 1.793796420097351,
      "eval_runtime": 13.0555,
      "eval_samples_per_second": 504.31,
      "eval_steps_per_second": 7.889,
      "step": 190
    },
    {
      "epoch": 0.022943673282092462,
      "grad_norm": 4.906337738037109,
      "learning_rate": 7.645259938837921e-07,
      "loss": 3.0303,
      "step": 200
    },
    {
      "epoch": 0.022943673282092462,
      "eval_loss": 1.785815715789795,
      "eval_runtime": 12.9925,
      "eval_samples_per_second": 506.754,
      "eval_steps_per_second": 7.928,
      "step": 200
    },
    {
      "epoch": 0.024090856946197087,
      "eval_loss": 1.7775053977966309,
      "eval_runtime": 13.0639,
      "eval_samples_per_second": 503.986,
      "eval_steps_per_second": 7.884,
      "step": 210
    },
    {
      "epoch": 0.025238040610301708,
      "eval_loss": 1.7692992687225342,
      "eval_runtime": 13.0129,
      "eval_samples_per_second": 505.96,
      "eval_steps_per_second": 7.915,
      "step": 220
    },
    {
      "epoch": 0.026385224274406333,
      "eval_loss": 1.760453224182129,
      "eval_runtime": 13.0078,
      "eval_samples_per_second": 506.158,
      "eval_steps_per_second": 7.918,
      "step": 230
    },
    {
      "epoch": 0.027532407938510955,
      "eval_loss": 1.751396656036377,
      "eval_runtime": 12.9957,
      "eval_samples_per_second": 506.628,
      "eval_steps_per_second": 7.926,
      "step": 240
    },
    {
      "epoch": 0.02867959160261558,
      "eval_loss": 1.7417218685150146,
      "eval_runtime": 12.9774,
      "eval_samples_per_second": 507.344,
      "eval_steps_per_second": 7.937,
      "step": 250
    },
    {
      "epoch": 0.0298267752667202,
      "eval_loss": 1.7319914102554321,
      "eval_runtime": 13.0219,
      "eval_samples_per_second": 505.611,
      "eval_steps_per_second": 7.91,
      "step": 260
    },
    {
      "epoch": 0.030973958930824826,
      "eval_loss": 1.7227253913879395,
      "eval_runtime": 13.0026,
      "eval_samples_per_second": 506.361,
      "eval_steps_per_second": 7.922,
      "step": 270
    },
    {
      "epoch": 0.03212114259492945,
      "eval_loss": 1.7133797407150269,
      "eval_runtime": 12.9757,
      "eval_samples_per_second": 507.409,
      "eval_steps_per_second": 7.938,
      "step": 280
    },
    {
      "epoch": 0.03326832625903407,
      "eval_loss": 1.704041600227356,
      "eval_runtime": 12.9845,
      "eval_samples_per_second": 507.065,
      "eval_steps_per_second": 7.933,
      "step": 290
    },
    {
      "epoch": 0.034415509923138694,
      "grad_norm": 4.665822505950928,
      "learning_rate": 1.1467889908256882e-06,
      "loss": 2.9459,
      "step": 300
    },
    {
      "epoch": 0.034415509923138694,
      "eval_loss": 1.6940686702728271,
      "eval_runtime": 13.0019,
      "eval_samples_per_second": 506.387,
      "eval_steps_per_second": 7.922,
      "step": 300
    },
    {
      "epoch": 0.035562693587243316,
      "eval_loss": 1.683342695236206,
      "eval_runtime": 13.0065,
      "eval_samples_per_second": 506.209,
      "eval_steps_per_second": 7.919,
      "step": 310
    },
    {
      "epoch": 0.03670987725134794,
      "eval_loss": 1.6724653244018555,
      "eval_runtime": 13.0129,
      "eval_samples_per_second": 505.96,
      "eval_steps_per_second": 7.915,
      "step": 320
    },
    {
      "epoch": 0.037857060915452566,
      "eval_loss": 1.6614341735839844,
      "eval_runtime": 12.9921,
      "eval_samples_per_second": 506.769,
      "eval_steps_per_second": 7.928,
      "step": 330
    },
    {
      "epoch": 0.03900424457955719,
      "eval_loss": 1.6510112285614014,
      "eval_runtime": 13.0242,
      "eval_samples_per_second": 505.52,
      "eval_steps_per_second": 7.908,
      "step": 340
    },
    {
      "epoch": 0.04015142824366181,
      "eval_loss": 1.6401513814926147,
      "eval_runtime": 12.9214,
      "eval_samples_per_second": 509.542,
      "eval_steps_per_second": 7.971,
      "step": 350
    },
    {
      "epoch": 0.04129861190776643,
      "eval_loss": 1.6295816898345947,
      "eval_runtime": 12.9563,
      "eval_samples_per_second": 508.171,
      "eval_steps_per_second": 7.95,
      "step": 360
    },
    {
      "epoch": 0.04244579557187106,
      "eval_loss": 1.6187150478363037,
      "eval_runtime": 12.9758,
      "eval_samples_per_second": 507.405,
      "eval_steps_per_second": 7.938,
      "step": 370
    },
    {
      "epoch": 0.04359297923597568,
      "eval_loss": 1.607272982597351,
      "eval_runtime": 12.9876,
      "eval_samples_per_second": 506.947,
      "eval_steps_per_second": 7.931,
      "step": 380
    },
    {
      "epoch": 0.0447401629000803,
      "eval_loss": 1.5961676836013794,
      "eval_runtime": 12.9782,
      "eval_samples_per_second": 507.313,
      "eval_steps_per_second": 7.936,
      "step": 390
    },
    {
      "epoch": 0.045887346564184923,
      "grad_norm": 4.870114326477051,
      "learning_rate": 1.5290519877675841e-06,
      "loss": 2.7813,
      "step": 400
    },
    {
      "epoch": 0.045887346564184923,
      "eval_loss": 1.5848218202590942,
      "eval_runtime": 12.9783,
      "eval_samples_per_second": 507.309,
      "eval_steps_per_second": 7.936,
      "step": 400
    },
    {
      "epoch": 0.04703453022828955,
      "eval_loss": 1.5734797716140747,
      "eval_runtime": 12.9739,
      "eval_samples_per_second": 507.482,
      "eval_steps_per_second": 7.939,
      "step": 410
    },
    {
      "epoch": 0.04818171389239417,
      "eval_loss": 1.562021255493164,
      "eval_runtime": 12.9388,
      "eval_samples_per_second": 508.855,
      "eval_steps_per_second": 7.961,
      "step": 420
    },
    {
      "epoch": 0.049328897556498795,
      "eval_loss": 1.5495364665985107,
      "eval_runtime": 12.9412,
      "eval_samples_per_second": 508.764,
      "eval_steps_per_second": 7.959,
      "step": 430
    },
    {
      "epoch": 0.050476081220603417,
      "eval_loss": 1.5375314950942993,
      "eval_runtime": 12.9686,
      "eval_samples_per_second": 507.687,
      "eval_steps_per_second": 7.942,
      "step": 440
    },
    {
      "epoch": 0.051623264884708045,
      "eval_loss": 1.525598168373108,
      "eval_runtime": 12.9695,
      "eval_samples_per_second": 507.651,
      "eval_steps_per_second": 7.942,
      "step": 450
    },
    {
      "epoch": 0.052770448548812667,
      "eval_loss": 1.5132672786712646,
      "eval_runtime": 12.8961,
      "eval_samples_per_second": 510.543,
      "eval_steps_per_second": 7.987,
      "step": 460
    },
    {
      "epoch": 0.05391763221291729,
      "eval_loss": 1.5012215375900269,
      "eval_runtime": 12.9428,
      "eval_samples_per_second": 508.7,
      "eval_steps_per_second": 7.958,
      "step": 470
    },
    {
      "epoch": 0.05506481587702191,
      "eval_loss": 1.4892219305038452,
      "eval_runtime": 12.9208,
      "eval_samples_per_second": 509.567,
      "eval_steps_per_second": 7.972,
      "step": 480
    },
    {
      "epoch": 0.05621199954112653,
      "eval_loss": 1.4768636226654053,
      "eval_runtime": 12.9423,
      "eval_samples_per_second": 508.721,
      "eval_steps_per_second": 7.958,
      "step": 490
    },
    {
      "epoch": 0.05735918320523116,
      "grad_norm": 4.155641555786133,
      "learning_rate": 1.9113149847094803e-06,
      "loss": 2.6308,
      "step": 500
    },
    {
      "epoch": 0.05735918320523116,
      "eval_loss": 1.4640088081359863,
      "eval_runtime": 12.8729,
      "eval_samples_per_second": 511.462,
      "eval_steps_per_second": 8.001,
      "step": 500
    },
    {
      "epoch": 0.05850636686933578,
      "eval_loss": 1.4513096809387207,
      "eval_runtime": 12.9653,
      "eval_samples_per_second": 507.817,
      "eval_steps_per_second": 7.944,
      "step": 510
    },
    {
      "epoch": 0.0596535505334404,
      "eval_loss": 1.439149260520935,
      "eval_runtime": 12.9443,
      "eval_samples_per_second": 508.639,
      "eval_steps_per_second": 7.957,
      "step": 520
    },
    {
      "epoch": 0.060800734197545024,
      "eval_loss": 1.426237940788269,
      "eval_runtime": 12.9496,
      "eval_samples_per_second": 508.433,
      "eval_steps_per_second": 7.954,
      "step": 530
    },
    {
      "epoch": 0.06194791786164965,
      "eval_loss": 1.4129557609558105,
      "eval_runtime": 12.9822,
      "eval_samples_per_second": 507.157,
      "eval_steps_per_second": 7.934,
      "step": 540
    },
    {
      "epoch": 0.06309510152575427,
      "eval_loss": 1.3997886180877686,
      "eval_runtime": 12.9979,
      "eval_samples_per_second": 506.542,
      "eval_steps_per_second": 7.924,
      "step": 550
    },
    {
      "epoch": 0.0642422851898589,
      "eval_loss": 1.3873906135559082,
      "eval_runtime": 12.9378,
      "eval_samples_per_second": 508.895,
      "eval_steps_per_second": 7.961,
      "step": 560
    },
    {
      "epoch": 0.06538946885396352,
      "eval_loss": 1.3751789331436157,
      "eval_runtime": 12.9624,
      "eval_samples_per_second": 507.932,
      "eval_steps_per_second": 7.946,
      "step": 570
    },
    {
      "epoch": 0.06653665251806815,
      "eval_loss": 1.3620370626449585,
      "eval_runtime": 12.9498,
      "eval_samples_per_second": 508.426,
      "eval_steps_per_second": 7.954,
      "step": 580
    },
    {
      "epoch": 0.06768383618217276,
      "eval_loss": 1.3485124111175537,
      "eval_runtime": 12.9759,
      "eval_samples_per_second": 507.403,
      "eval_steps_per_second": 7.938,
      "step": 590
    },
    {
      "epoch": 0.06883101984627739,
      "grad_norm": 5.262124061584473,
      "learning_rate": 2.2935779816513764e-06,
      "loss": 2.4452,
      "step": 600
    },
    {
      "epoch": 0.06883101984627739,
      "eval_loss": 1.3349775075912476,
      "eval_runtime": 12.9631,
      "eval_samples_per_second": 507.902,
      "eval_steps_per_second": 7.946,
      "step": 600
    },
    {
      "epoch": 0.06997820351038202,
      "eval_loss": 1.3213400840759277,
      "eval_runtime": 12.9619,
      "eval_samples_per_second": 507.951,
      "eval_steps_per_second": 7.946,
      "step": 610
    },
    {
      "epoch": 0.07112538717448663,
      "eval_loss": 1.308822512626648,
      "eval_runtime": 12.9652,
      "eval_samples_per_second": 507.82,
      "eval_steps_per_second": 7.944,
      "step": 620
    },
    {
      "epoch": 0.07227257083859126,
      "eval_loss": 1.296485185623169,
      "eval_runtime": 13.0441,
      "eval_samples_per_second": 504.75,
      "eval_steps_per_second": 7.896,
      "step": 630
    },
    {
      "epoch": 0.07341975450269587,
      "eval_loss": 1.283867597579956,
      "eval_runtime": 12.9822,
      "eval_samples_per_second": 507.154,
      "eval_steps_per_second": 7.934,
      "step": 640
    },
    {
      "epoch": 0.0745669381668005,
      "eval_loss": 1.2713148593902588,
      "eval_runtime": 12.9775,
      "eval_samples_per_second": 507.338,
      "eval_steps_per_second": 7.937,
      "step": 650
    },
    {
      "epoch": 0.07571412183090513,
      "eval_loss": 1.2591922283172607,
      "eval_runtime": 13.0227,
      "eval_samples_per_second": 505.578,
      "eval_steps_per_second": 7.909,
      "step": 660
    },
    {
      "epoch": 0.07686130549500975,
      "eval_loss": 1.246610164642334,
      "eval_runtime": 13.014,
      "eval_samples_per_second": 505.917,
      "eval_steps_per_second": 7.915,
      "step": 670
    },
    {
      "epoch": 0.07800848915911437,
      "eval_loss": 1.2331972122192383,
      "eval_runtime": 12.9634,
      "eval_samples_per_second": 507.891,
      "eval_steps_per_second": 7.945,
      "step": 680
    },
    {
      "epoch": 0.079155672823219,
      "eval_loss": 1.2203081846237183,
      "eval_runtime": 12.9664,
      "eval_samples_per_second": 507.775,
      "eval_steps_per_second": 7.944,
      "step": 690
    },
    {
      "epoch": 0.08030285648732362,
      "grad_norm": 3.824066400527954,
      "learning_rate": 2.6758409785932725e-06,
      "loss": 2.2626,
      "step": 700
    },
    {
      "epoch": 0.08030285648732362,
      "eval_loss": 1.207729697227478,
      "eval_runtime": 13.037,
      "eval_samples_per_second": 505.025,
      "eval_steps_per_second": 7.901,
      "step": 700
    },
    {
      "epoch": 0.08145004015142825,
      "eval_loss": 1.195885419845581,
      "eval_runtime": 12.9625,
      "eval_samples_per_second": 507.928,
      "eval_steps_per_second": 7.946,
      "step": 710
    },
    {
      "epoch": 0.08259722381553286,
      "eval_loss": 1.1840639114379883,
      "eval_runtime": 13.0124,
      "eval_samples_per_second": 505.98,
      "eval_steps_per_second": 7.916,
      "step": 720
    },
    {
      "epoch": 0.08374440747963749,
      "eval_loss": 1.1725258827209473,
      "eval_runtime": 12.9851,
      "eval_samples_per_second": 507.043,
      "eval_steps_per_second": 7.932,
      "step": 730
    },
    {
      "epoch": 0.08489159114374212,
      "eval_loss": 1.1619255542755127,
      "eval_runtime": 13.1041,
      "eval_samples_per_second": 502.438,
      "eval_steps_per_second": 7.86,
      "step": 740
    },
    {
      "epoch": 0.08603877480784673,
      "eval_loss": 1.1515777111053467,
      "eval_runtime": 12.9648,
      "eval_samples_per_second": 507.837,
      "eval_steps_per_second": 7.945,
      "step": 750
    },
    {
      "epoch": 0.08718595847195136,
      "eval_loss": 1.1416434049606323,
      "eval_runtime": 13.0349,
      "eval_samples_per_second": 505.104,
      "eval_steps_per_second": 7.902,
      "step": 760
    },
    {
      "epoch": 0.08833314213605599,
      "eval_loss": 1.1320044994354248,
      "eval_runtime": 13.0667,
      "eval_samples_per_second": 503.875,
      "eval_steps_per_second": 7.883,
      "step": 770
    },
    {
      "epoch": 0.0894803258001606,
      "eval_loss": 1.1226693391799927,
      "eval_runtime": 13.3034,
      "eval_samples_per_second": 494.912,
      "eval_steps_per_second": 7.742,
      "step": 780
    },
    {
      "epoch": 0.09062750946426523,
      "eval_loss": 1.113792061805725,
      "eval_runtime": 12.9793,
      "eval_samples_per_second": 507.271,
      "eval_steps_per_second": 7.936,
      "step": 790
    },
    {
      "epoch": 0.09177469312836985,
      "grad_norm": 3.358140707015991,
      "learning_rate": 3.0581039755351682e-06,
      "loss": 2.0044,
      "step": 800
    },
    {
      "epoch": 0.09177469312836985,
      "eval_loss": 1.1052675247192383,
      "eval_runtime": 13.0193,
      "eval_samples_per_second": 505.709,
      "eval_steps_per_second": 7.911,
      "step": 800
    },
    {
      "epoch": 0.09292187679247448,
      "eval_loss": 1.0964922904968262,
      "eval_runtime": 13.1289,
      "eval_samples_per_second": 501.489,
      "eval_steps_per_second": 7.845,
      "step": 810
    },
    {
      "epoch": 0.0940690604565791,
      "eval_loss": 1.0879426002502441,
      "eval_runtime": 12.9727,
      "eval_samples_per_second": 507.526,
      "eval_steps_per_second": 7.94,
      "step": 820
    },
    {
      "epoch": 0.09521624412068372,
      "eval_loss": 1.0795575380325317,
      "eval_runtime": 13.1138,
      "eval_samples_per_second": 502.068,
      "eval_steps_per_second": 7.854,
      "step": 830
    },
    {
      "epoch": 0.09636342778478835,
      "eval_loss": 1.0718028545379639,
      "eval_runtime": 12.9813,
      "eval_samples_per_second": 507.191,
      "eval_steps_per_second": 7.934,
      "step": 840
    },
    {
      "epoch": 0.09751061144889296,
      "eval_loss": 1.0643888711929321,
      "eval_runtime": 13.0171,
      "eval_samples_per_second": 505.795,
      "eval_steps_per_second": 7.913,
      "step": 850
    },
    {
      "epoch": 0.09865779511299759,
      "eval_loss": 1.0564391613006592,
      "eval_runtime": 13.0169,
      "eval_samples_per_second": 505.806,
      "eval_steps_per_second": 7.913,
      "step": 860
    },
    {
      "epoch": 0.09980497877710222,
      "eval_loss": 1.0490267276763916,
      "eval_runtime": 13.0703,
      "eval_samples_per_second": 503.738,
      "eval_steps_per_second": 7.88,
      "step": 870
    },
    {
      "epoch": 0.10095216244120683,
      "eval_loss": 1.0417358875274658,
      "eval_runtime": 13.0303,
      "eval_samples_per_second": 505.284,
      "eval_steps_per_second": 7.905,
      "step": 880
    },
    {
      "epoch": 0.10209934610531146,
      "eval_loss": 1.0353840589523315,
      "eval_runtime": 12.9765,
      "eval_samples_per_second": 507.381,
      "eval_steps_per_second": 7.937,
      "step": 890
    },
    {
      "epoch": 0.10324652976941609,
      "grad_norm": 3.705247402191162,
      "learning_rate": 3.4403669724770644e-06,
      "loss": 1.8763,
      "step": 900
    },
    {
      "epoch": 0.10324652976941609,
      "eval_loss": 1.0295778512954712,
      "eval_runtime": 13.0396,
      "eval_samples_per_second": 504.924,
      "eval_steps_per_second": 7.899,
      "step": 900
    },
    {
      "epoch": 0.1043937134335207,
      "eval_loss": 1.0238651037216187,
      "eval_runtime": 12.973,
      "eval_samples_per_second": 507.515,
      "eval_steps_per_second": 7.94,
      "step": 910
    },
    {
      "epoch": 0.10554089709762533,
      "eval_loss": 1.0180109739303589,
      "eval_runtime": 13.0196,
      "eval_samples_per_second": 505.7,
      "eval_steps_per_second": 7.911,
      "step": 920
    },
    {
      "epoch": 0.10668808076172995,
      "eval_loss": 1.0122654438018799,
      "eval_runtime": 12.9877,
      "eval_samples_per_second": 506.942,
      "eval_steps_per_second": 7.931,
      "step": 930
    },
    {
      "epoch": 0.10783526442583458,
      "eval_loss": 1.0065423250198364,
      "eval_runtime": 13.0039,
      "eval_samples_per_second": 506.312,
      "eval_steps_per_second": 7.921,
      "step": 940
    },
    {
      "epoch": 0.1089824480899392,
      "eval_loss": 1.0008338689804077,
      "eval_runtime": 13.1129,
      "eval_samples_per_second": 502.101,
      "eval_steps_per_second": 7.855,
      "step": 950
    },
    {
      "epoch": 0.11012963175404382,
      "eval_loss": 0.9950375556945801,
      "eval_runtime": 13.0191,
      "eval_samples_per_second": 505.718,
      "eval_steps_per_second": 7.911,
      "step": 960
    },
    {
      "epoch": 0.11127681541814845,
      "eval_loss": 0.9893631935119629,
      "eval_runtime": 12.9794,
      "eval_samples_per_second": 507.267,
      "eval_steps_per_second": 7.936,
      "step": 970
    },
    {
      "epoch": 0.11242399908225306,
      "eval_loss": 0.9840025305747986,
      "eval_runtime": 12.9799,
      "eval_samples_per_second": 507.247,
      "eval_steps_per_second": 7.935,
      "step": 980
    },
    {
      "epoch": 0.11357118274635769,
      "eval_loss": 0.9792994856834412,
      "eval_runtime": 12.9866,
      "eval_samples_per_second": 506.983,
      "eval_steps_per_second": 7.931,
      "step": 990
    },
    {
      "epoch": 0.11471836641046232,
      "grad_norm": 4.240755558013916,
      "learning_rate": 3.8226299694189605e-06,
      "loss": 1.7287,
      "step": 1000
    },
    {
      "epoch": 0.11471836641046232,
      "eval_loss": 0.9751574993133545,
      "eval_runtime": 13.0083,
      "eval_samples_per_second": 506.136,
      "eval_steps_per_second": 7.918,
      "step": 1000
    },
    {
      "epoch": 0.11586555007456693,
      "eval_loss": 0.9705988764762878,
      "eval_runtime": 12.9835,
      "eval_samples_per_second": 507.107,
      "eval_steps_per_second": 7.933,
      "step": 1010
    },
    {
      "epoch": 0.11701273373867156,
      "eval_loss": 0.9658572673797607,
      "eval_runtime": 13.0627,
      "eval_samples_per_second": 504.031,
      "eval_steps_per_second": 7.885,
      "step": 1020
    },
    {
      "epoch": 0.11815991740277619,
      "eval_loss": 0.9614543318748474,
      "eval_runtime": 13.0169,
      "eval_samples_per_second": 505.803,
      "eval_steps_per_second": 7.913,
      "step": 1030
    },
    {
      "epoch": 0.1193071010668808,
      "eval_loss": 0.9571945667266846,
      "eval_runtime": 12.9762,
      "eval_samples_per_second": 507.389,
      "eval_steps_per_second": 7.938,
      "step": 1040
    },
    {
      "epoch": 0.12045428473098543,
      "eval_loss": 0.9531480073928833,
      "eval_runtime": 13.0036,
      "eval_samples_per_second": 506.323,
      "eval_steps_per_second": 7.921,
      "step": 1050
    },
    {
      "epoch": 0.12160146839509005,
      "eval_loss": 0.9493557810783386,
      "eval_runtime": 13.0046,
      "eval_samples_per_second": 506.282,
      "eval_steps_per_second": 7.92,
      "step": 1060
    },
    {
      "epoch": 0.12274865205919468,
      "eval_loss": 0.9455849528312683,
      "eval_runtime": 13.0676,
      "eval_samples_per_second": 503.843,
      "eval_steps_per_second": 7.882,
      "step": 1070
    },
    {
      "epoch": 0.1238958357232993,
      "eval_loss": 0.9415374398231506,
      "eval_runtime": 13.0698,
      "eval_samples_per_second": 503.755,
      "eval_steps_per_second": 7.881,
      "step": 1080
    },
    {
      "epoch": 0.12504301938740392,
      "eval_loss": 0.9376588463783264,
      "eval_runtime": 12.9843,
      "eval_samples_per_second": 507.072,
      "eval_steps_per_second": 7.933,
      "step": 1090
    },
    {
      "epoch": 0.12619020305150855,
      "grad_norm": 3.3081679344177246,
      "learning_rate": 4.204892966360857e-06,
      "loss": 1.6312,
      "step": 1100
    },
    {
      "epoch": 0.12619020305150855,
      "eval_loss": 0.9338813424110413,
      "eval_runtime": 13.0365,
      "eval_samples_per_second": 505.043,
      "eval_steps_per_second": 7.901,
      "step": 1100
    },
    {
      "epoch": 0.12733738671561318,
      "eval_loss": 0.9302825927734375,
      "eval_runtime": 13.0692,
      "eval_samples_per_second": 503.779,
      "eval_steps_per_second": 7.881,
      "step": 1110
    },
    {
      "epoch": 0.1284845703797178,
      "eval_loss": 0.9267016053199768,
      "eval_runtime": 13.0455,
      "eval_samples_per_second": 504.694,
      "eval_steps_per_second": 7.895,
      "step": 1120
    },
    {
      "epoch": 0.1296317540438224,
      "eval_loss": 0.9232119917869568,
      "eval_runtime": 13.0051,
      "eval_samples_per_second": 506.262,
      "eval_steps_per_second": 7.92,
      "step": 1130
    },
    {
      "epoch": 0.13077893770792703,
      "eval_loss": 0.9197220206260681,
      "eval_runtime": 13.0069,
      "eval_samples_per_second": 506.192,
      "eval_steps_per_second": 7.919,
      "step": 1140
    },
    {
      "epoch": 0.13192612137203166,
      "eval_loss": 0.9162309765815735,
      "eval_runtime": 12.9851,
      "eval_samples_per_second": 507.044,
      "eval_steps_per_second": 7.932,
      "step": 1150
    },
    {
      "epoch": 0.1330733050361363,
      "eval_loss": 0.9127652049064636,
      "eval_runtime": 12.9768,
      "eval_samples_per_second": 507.366,
      "eval_steps_per_second": 7.937,
      "step": 1160
    },
    {
      "epoch": 0.13422048870024092,
      "eval_loss": 0.9096914529800415,
      "eval_runtime": 12.9953,
      "eval_samples_per_second": 506.645,
      "eval_steps_per_second": 7.926,
      "step": 1170
    },
    {
      "epoch": 0.13536767236434552,
      "eval_loss": 0.9069137573242188,
      "eval_runtime": 13.0109,
      "eval_samples_per_second": 506.038,
      "eval_steps_per_second": 7.916,
      "step": 1180
    },
    {
      "epoch": 0.13651485602845015,
      "eval_loss": 0.9039744734764099,
      "eval_runtime": 12.9772,
      "eval_samples_per_second": 507.352,
      "eval_steps_per_second": 7.937,
      "step": 1190
    },
    {
      "epoch": 0.13766203969255478,
      "grad_norm": 3.420077085494995,
      "learning_rate": 4.587155963302753e-06,
      "loss": 1.5316,
      "step": 1200
    },
    {
      "epoch": 0.13766203969255478,
      "eval_loss": 0.9009912610054016,
      "eval_runtime": 12.9848,
      "eval_samples_per_second": 507.056,
      "eval_steps_per_second": 7.932,
      "step": 1200
    },
    {
      "epoch": 0.1388092233566594,
      "eval_loss": 0.8979319930076599,
      "eval_runtime": 13.0295,
      "eval_samples_per_second": 505.313,
      "eval_steps_per_second": 7.905,
      "step": 1210
    },
    {
      "epoch": 0.13995640702076403,
      "eval_loss": 0.8946565985679626,
      "eval_runtime": 13.0119,
      "eval_samples_per_second": 505.997,
      "eval_steps_per_second": 7.916,
      "step": 1220
    },
    {
      "epoch": 0.14110359068486863,
      "eval_loss": 0.8915460705757141,
      "eval_runtime": 12.9979,
      "eval_samples_per_second": 506.545,
      "eval_steps_per_second": 7.924,
      "step": 1230
    },
    {
      "epoch": 0.14225077434897326,
      "eval_loss": 0.8887993097305298,
      "eval_runtime": 12.9633,
      "eval_samples_per_second": 507.895,
      "eval_steps_per_second": 7.945,
      "step": 1240
    },
    {
      "epoch": 0.1433979580130779,
      "eval_loss": 0.886054277420044,
      "eval_runtime": 12.9996,
      "eval_samples_per_second": 506.477,
      "eval_steps_per_second": 7.923,
      "step": 1250
    },
    {
      "epoch": 0.14454514167718252,
      "eval_loss": 0.8833284974098206,
      "eval_runtime": 13.1133,
      "eval_samples_per_second": 502.086,
      "eval_steps_per_second": 7.855,
      "step": 1260
    },
    {
      "epoch": 0.14569232534128715,
      "eval_loss": 0.8806452751159668,
      "eval_runtime": 13.0208,
      "eval_samples_per_second": 505.654,
      "eval_steps_per_second": 7.91,
      "step": 1270
    },
    {
      "epoch": 0.14683950900539175,
      "eval_loss": 0.8778645992279053,
      "eval_runtime": 13.1071,
      "eval_samples_per_second": 502.322,
      "eval_steps_per_second": 7.858,
      "step": 1280
    },
    {
      "epoch": 0.14798669266949638,
      "eval_loss": 0.8747658133506775,
      "eval_runtime": 13.0362,
      "eval_samples_per_second": 505.055,
      "eval_steps_per_second": 7.901,
      "step": 1290
    },
    {
      "epoch": 0.149133876333601,
      "grad_norm": 3.3882789611816406,
      "learning_rate": 4.969418960244649e-06,
      "loss": 1.4961,
      "step": 1300
    },
    {
      "epoch": 0.149133876333601,
      "eval_loss": 0.8717615008354187,
      "eval_runtime": 13.0104,
      "eval_samples_per_second": 506.058,
      "eval_steps_per_second": 7.917,
      "step": 1300
    },
    {
      "epoch": 0.15028105999770563,
      "eval_loss": 0.8690453767776489,
      "eval_runtime": 13.0189,
      "eval_samples_per_second": 505.727,
      "eval_steps_per_second": 7.912,
      "step": 1310
    },
    {
      "epoch": 0.15142824366181026,
      "eval_loss": 0.8664180040359497,
      "eval_runtime": 12.988,
      "eval_samples_per_second": 506.931,
      "eval_steps_per_second": 7.93,
      "step": 1320
    },
    {
      "epoch": 0.1525754273259149,
      "eval_loss": 0.8634527921676636,
      "eval_runtime": 12.9897,
      "eval_samples_per_second": 506.861,
      "eval_steps_per_second": 7.929,
      "step": 1330
    },
    {
      "epoch": 0.1537226109900195,
      "eval_loss": 0.8603318929672241,
      "eval_runtime": 13.0269,
      "eval_samples_per_second": 505.417,
      "eval_steps_per_second": 7.907,
      "step": 1340
    },
    {
      "epoch": 0.15486979465412412,
      "eval_loss": 0.8573653697967529,
      "eval_runtime": 12.9945,
      "eval_samples_per_second": 506.677,
      "eval_steps_per_second": 7.926,
      "step": 1350
    },
    {
      "epoch": 0.15601697831822875,
      "eval_loss": 0.8545491695404053,
      "eval_runtime": 12.9981,
      "eval_samples_per_second": 506.536,
      "eval_steps_per_second": 7.924,
      "step": 1360
    },
    {
      "epoch": 0.15716416198233338,
      "eval_loss": 0.85209721326828,
      "eval_runtime": 13.0301,
      "eval_samples_per_second": 505.293,
      "eval_steps_per_second": 7.905,
      "step": 1370
    },
    {
      "epoch": 0.158311345646438,
      "eval_loss": 0.8497452139854431,
      "eval_runtime": 13.1229,
      "eval_samples_per_second": 501.717,
      "eval_steps_per_second": 7.849,
      "step": 1380
    },
    {
      "epoch": 0.1594585293105426,
      "eval_loss": 0.8474416136741638,
      "eval_runtime": 13.0624,
      "eval_samples_per_second": 504.043,
      "eval_steps_per_second": 7.885,
      "step": 1390
    },
    {
      "epoch": 0.16060571297464724,
      "grad_norm": 3.7449283599853516,
      "learning_rate": 5.351681957186545e-06,
      "loss": 1.451,
      "step": 1400
    },
    {
      "epoch": 0.16060571297464724,
      "eval_loss": 0.8452581763267517,
      "eval_runtime": 13.0191,
      "eval_samples_per_second": 505.72,
      "eval_steps_per_second": 7.911,
      "step": 1400
    },
    {
      "epoch": 0.16175289663875186,
      "eval_loss": 0.8428558111190796,
      "eval_runtime": 13.0038,
      "eval_samples_per_second": 506.314,
      "eval_steps_per_second": 7.921,
      "step": 1410
    },
    {
      "epoch": 0.1629000803028565,
      "eval_loss": 0.8404139876365662,
      "eval_runtime": 12.9904,
      "eval_samples_per_second": 506.837,
      "eval_steps_per_second": 7.929,
      "step": 1420
    },
    {
      "epoch": 0.16404726396696112,
      "eval_loss": 0.8380302786827087,
      "eval_runtime": 12.9812,
      "eval_samples_per_second": 507.195,
      "eval_steps_per_second": 7.935,
      "step": 1430
    },
    {
      "epoch": 0.16519444763106572,
      "eval_loss": 0.8357470631599426,
      "eval_runtime": 13.0241,
      "eval_samples_per_second": 505.526,
      "eval_steps_per_second": 7.908,
      "step": 1440
    },
    {
      "epoch": 0.16634163129517035,
      "eval_loss": 0.8336036205291748,
      "eval_runtime": 13.142,
      "eval_samples_per_second": 500.99,
      "eval_steps_per_second": 7.837,
      "step": 1450
    },
    {
      "epoch": 0.16748881495927498,
      "eval_loss": 0.8312162756919861,
      "eval_runtime": 12.9935,
      "eval_samples_per_second": 506.714,
      "eval_steps_per_second": 7.927,
      "step": 1460
    },
    {
      "epoch": 0.1686359986233796,
      "eval_loss": 0.8288681507110596,
      "eval_runtime": 13.1018,
      "eval_samples_per_second": 502.527,
      "eval_steps_per_second": 7.862,
      "step": 1470
    },
    {
      "epoch": 0.16978318228748424,
      "eval_loss": 0.8261700868606567,
      "eval_runtime": 13.0327,
      "eval_samples_per_second": 505.19,
      "eval_steps_per_second": 7.903,
      "step": 1480
    },
    {
      "epoch": 0.17093036595158884,
      "eval_loss": 0.8235771656036377,
      "eval_runtime": 12.9895,
      "eval_samples_per_second": 506.873,
      "eval_steps_per_second": 7.93,
      "step": 1490
    },
    {
      "epoch": 0.17207754961569346,
      "grad_norm": 2.5695531368255615,
      "learning_rate": 5.733944954128441e-06,
      "loss": 1.4177,
      "step": 1500
    },
    {
      "epoch": 0.17207754961569346,
      "eval_loss": 0.8213275074958801,
      "eval_runtime": 13.0468,
      "eval_samples_per_second": 504.647,
      "eval_steps_per_second": 7.895,
      "step": 1500
    },
    {
      "epoch": 0.1732247332797981,
      "eval_loss": 0.8189388513565063,
      "eval_runtime": 12.9877,
      "eval_samples_per_second": 506.943,
      "eval_steps_per_second": 7.931,
      "step": 1510
    },
    {
      "epoch": 0.17437191694390272,
      "eval_loss": 0.8168380856513977,
      "eval_runtime": 12.9915,
      "eval_samples_per_second": 506.793,
      "eval_steps_per_second": 7.928,
      "step": 1520
    },
    {
      "epoch": 0.17551910060800735,
      "eval_loss": 0.814733624458313,
      "eval_runtime": 13.1726,
      "eval_samples_per_second": 499.824,
|
"eval_steps_per_second": 7.819, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.17666628427211198, |
|
"eval_loss": 0.8127268552780151, |
|
"eval_runtime": 13.0207, |
|
"eval_samples_per_second": 505.657, |
|
"eval_steps_per_second": 7.91, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.17781346793621658, |
|
"eval_loss": 0.8106749057769775, |
|
"eval_runtime": 13.0973, |
|
"eval_samples_per_second": 502.697, |
|
"eval_steps_per_second": 7.864, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.1789606516003212, |
|
"eval_loss": 0.808172345161438, |
|
"eval_runtime": 13.2116, |
|
"eval_samples_per_second": 498.349, |
|
"eval_steps_per_second": 7.796, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.18010783526442584, |
|
"eval_loss": 0.8059112429618835, |
|
"eval_runtime": 12.9871, |
|
"eval_samples_per_second": 506.963, |
|
"eval_steps_per_second": 7.931, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.18125501892853046, |
|
"eval_loss": 0.8035888671875, |
|
"eval_runtime": 13.0963, |
|
"eval_samples_per_second": 502.737, |
|
"eval_steps_per_second": 7.865, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.1824022025926351, |
|
"eval_loss": 0.8014644384384155, |
|
"eval_runtime": 13.0028, |
|
"eval_samples_per_second": 506.351, |
|
"eval_steps_per_second": 7.921, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.1835493862567397, |
|
"grad_norm": 2.681454658508301, |
|
"learning_rate": 6.1162079510703365e-06, |
|
"loss": 1.3734, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.1835493862567397, |
|
"eval_loss": 0.7993160486221313, |
|
"eval_runtime": 12.9893, |
|
"eval_samples_per_second": 506.879, |
|
"eval_steps_per_second": 7.93, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.18469656992084432, |
|
"eval_loss": 0.7969831228256226, |
|
"eval_runtime": 13.0137, |
|
"eval_samples_per_second": 505.929, |
|
"eval_steps_per_second": 7.915, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.18584375358494895, |
|
"eval_loss": 0.7947937250137329, |
|
"eval_runtime": 13.0467, |
|
"eval_samples_per_second": 504.648, |
|
"eval_steps_per_second": 7.895, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.18699093724905358, |
|
"eval_loss": 0.7922271490097046, |
|
"eval_runtime": 13.1133, |
|
"eval_samples_per_second": 502.086, |
|
"eval_steps_per_second": 7.855, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.1881381209131582, |
|
"eval_loss": 0.790046751499176, |
|
"eval_runtime": 13.0687, |
|
"eval_samples_per_second": 503.801, |
|
"eval_steps_per_second": 7.881, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.1892853045772628, |
|
"eval_loss": 0.7877430319786072, |
|
"eval_runtime": 13.1712, |
|
"eval_samples_per_second": 499.877, |
|
"eval_steps_per_second": 7.82, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.19043248824136744, |
|
"eval_loss": 0.7852274179458618, |
|
"eval_runtime": 13.0721, |
|
"eval_samples_per_second": 503.669, |
|
"eval_steps_per_second": 7.879, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.19157967190547207, |
|
"eval_loss": 0.782863438129425, |
|
"eval_runtime": 12.9741, |
|
"eval_samples_per_second": 507.471, |
|
"eval_steps_per_second": 7.939, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.1927268555695767, |
|
"eval_loss": 0.7804363965988159, |
|
"eval_runtime": 12.9826, |
|
"eval_samples_per_second": 507.142, |
|
"eval_steps_per_second": 7.934, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.19387403923368132, |
|
"eval_loss": 0.7779432535171509, |
|
"eval_runtime": 12.9731, |
|
"eval_samples_per_second": 507.514, |
|
"eval_steps_per_second": 7.94, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.19502122289778592, |
|
"grad_norm": 3.1423346996307373, |
|
"learning_rate": 6.4984709480122335e-06, |
|
"loss": 1.3327, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.19502122289778592, |
|
"eval_loss": 0.7756665945053101, |
|
"eval_runtime": 12.9767, |
|
"eval_samples_per_second": 507.371, |
|
"eval_steps_per_second": 7.937, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.19616840656189055, |
|
"eval_loss": 0.7737661004066467, |
|
"eval_runtime": 12.9633, |
|
"eval_samples_per_second": 507.896, |
|
"eval_steps_per_second": 7.946, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.19731559022599518, |
|
"eval_loss": 0.7719039916992188, |
|
"eval_runtime": 12.9798, |
|
"eval_samples_per_second": 507.248, |
|
"eval_steps_per_second": 7.935, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.1984627738900998, |
|
"eval_loss": 0.7699739933013916, |
|
"eval_runtime": 12.9817, |
|
"eval_samples_per_second": 507.177, |
|
"eval_steps_per_second": 7.934, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.19960995755420444, |
|
"eval_loss": 0.7679038643836975, |
|
"eval_runtime": 13.0593, |
|
"eval_samples_per_second": 504.164, |
|
"eval_steps_per_second": 7.887, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.20075714121830904, |
|
"eval_loss": 0.765801191329956, |
|
"eval_runtime": 12.9827, |
|
"eval_samples_per_second": 507.136, |
|
"eval_steps_per_second": 7.934, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.20190432488241367, |
|
"eval_loss": 0.7640795707702637, |
|
"eval_runtime": 12.9885, |
|
"eval_samples_per_second": 506.908, |
|
"eval_steps_per_second": 7.93, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.2030515085465183, |
|
"eval_loss": 0.7621497511863708, |
|
"eval_runtime": 12.9872, |
|
"eval_samples_per_second": 506.96, |
|
"eval_steps_per_second": 7.931, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.20419869221062292, |
|
"eval_loss": 0.7601312398910522, |
|
"eval_runtime": 12.9805, |
|
"eval_samples_per_second": 507.223, |
|
"eval_steps_per_second": 7.935, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.20534587587472755, |
|
"eval_loss": 0.7580214738845825, |
|
"eval_runtime": 12.9849, |
|
"eval_samples_per_second": 507.05, |
|
"eval_steps_per_second": 7.932, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.20649305953883218, |
|
"grad_norm": 3.4953625202178955, |
|
"learning_rate": 6.880733944954129e-06, |
|
"loss": 1.2804, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.20649305953883218, |
|
"eval_loss": 0.7558028697967529, |
|
"eval_runtime": 12.989, |
|
"eval_samples_per_second": 506.889, |
|
"eval_steps_per_second": 7.93, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.20764024320293678, |
|
"eval_loss": 0.7535884380340576, |
|
"eval_runtime": 12.9795, |
|
"eval_samples_per_second": 507.262, |
|
"eval_steps_per_second": 7.936, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.2087874268670414, |
|
"eval_loss": 0.7514472603797913, |
|
"eval_runtime": 12.9901, |
|
"eval_samples_per_second": 506.846, |
|
"eval_steps_per_second": 7.929, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.20993461053114604, |
|
"eval_loss": 0.7493338584899902, |
|
"eval_runtime": 13.0011, |
|
"eval_samples_per_second": 506.418, |
|
"eval_steps_per_second": 7.922, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.21108179419525067, |
|
"eval_loss": 0.7473368644714355, |
|
"eval_runtime": 13.0424, |
|
"eval_samples_per_second": 504.815, |
|
"eval_steps_per_second": 7.897, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.2122289778593553, |
|
"eval_loss": 0.7450571060180664, |
|
"eval_runtime": 13.1033, |
|
"eval_samples_per_second": 502.468, |
|
"eval_steps_per_second": 7.861, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.2133761615234599, |
|
"eval_loss": 0.7429091930389404, |
|
"eval_runtime": 12.9903, |
|
"eval_samples_per_second": 506.841, |
|
"eval_steps_per_second": 7.929, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.21452334518756452, |
|
"eval_loss": 0.7408153414726257, |
|
"eval_runtime": 12.983, |
|
"eval_samples_per_second": 507.126, |
|
"eval_steps_per_second": 7.933, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.21567052885166915, |
|
"eval_loss": 0.7389461994171143, |
|
"eval_runtime": 12.9922, |
|
"eval_samples_per_second": 506.767, |
|
"eval_steps_per_second": 7.928, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.21681771251577378, |
|
"eval_loss": 0.7368388772010803, |
|
"eval_runtime": 13.0283, |
|
"eval_samples_per_second": 505.36, |
|
"eval_steps_per_second": 7.906, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.2179648961798784, |
|
"grad_norm": 2.869717597961426, |
|
"learning_rate": 7.262996941896026e-06, |
|
"loss": 1.2255, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.2179648961798784, |
|
"eval_loss": 0.73488450050354, |
|
"eval_runtime": 12.9924, |
|
"eval_samples_per_second": 506.759, |
|
"eval_steps_per_second": 7.928, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.219112079843983, |
|
"eval_loss": 0.7328305244445801, |
|
"eval_runtime": 12.9892, |
|
"eval_samples_per_second": 506.881, |
|
"eval_steps_per_second": 7.93, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.22025926350808764, |
|
"eval_loss": 0.7309767007827759, |
|
"eval_runtime": 13.1016, |
|
"eval_samples_per_second": 502.534, |
|
"eval_steps_per_second": 7.862, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.22140644717219227, |
|
"eval_loss": 0.7293325662612915, |
|
"eval_runtime": 12.9943, |
|
"eval_samples_per_second": 506.684, |
|
"eval_steps_per_second": 7.927, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.2225536308362969, |
|
"eval_loss": 0.7277292013168335, |
|
"eval_runtime": 13.0122, |
|
"eval_samples_per_second": 505.986, |
|
"eval_steps_per_second": 7.916, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.22370081450040152, |
|
"eval_loss": 0.7258683443069458, |
|
"eval_runtime": 13.0042, |
|
"eval_samples_per_second": 506.298, |
|
"eval_steps_per_second": 7.921, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.22484799816450612, |
|
"eval_loss": 0.7239750027656555, |
|
"eval_runtime": 13.0619, |
|
"eval_samples_per_second": 504.061, |
|
"eval_steps_per_second": 7.886, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.22599518182861075, |
|
"eval_loss": 0.7221319079399109, |
|
"eval_runtime": 12.9944, |
|
"eval_samples_per_second": 506.679, |
|
"eval_steps_per_second": 7.926, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.22714236549271538, |
|
"eval_loss": 0.720324695110321, |
|
"eval_runtime": 12.982, |
|
"eval_samples_per_second": 507.165, |
|
"eval_steps_per_second": 7.934, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.22828954915682, |
|
"eval_loss": 0.7184233069419861, |
|
"eval_runtime": 12.9988, |
|
"eval_samples_per_second": 506.507, |
|
"eval_steps_per_second": 7.924, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.22943673282092464, |
|
"grad_norm": 9.275369644165039, |
|
"learning_rate": 7.645259938837921e-06, |
|
"loss": 1.2635, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.22943673282092464, |
|
"eval_loss": 0.7165008187294006, |
|
"eval_runtime": 12.9966, |
|
"eval_samples_per_second": 506.594, |
|
"eval_steps_per_second": 7.925, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.23058391648502927, |
|
"eval_loss": 0.7150009870529175, |
|
"eval_runtime": 12.9901, |
|
"eval_samples_per_second": 506.849, |
|
"eval_steps_per_second": 7.929, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.23173110014913387, |
|
"eval_loss": 0.7134894728660583, |
|
"eval_runtime": 13.0392, |
|
"eval_samples_per_second": 504.938, |
|
"eval_steps_per_second": 7.899, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.2328782838132385, |
|
"eval_loss": 0.7117303013801575, |
|
"eval_runtime": 13.0384, |
|
"eval_samples_per_second": 504.971, |
|
"eval_steps_per_second": 7.9, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.23402546747734312, |
|
"eval_loss": 0.7099109292030334, |
|
"eval_runtime": 12.9879, |
|
"eval_samples_per_second": 506.932, |
|
"eval_steps_per_second": 7.93, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.23517265114144775, |
|
"eval_loss": 0.7084246873855591, |
|
"eval_runtime": 12.9995, |
|
"eval_samples_per_second": 506.481, |
|
"eval_steps_per_second": 7.923, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.23631983480555238, |
|
"eval_loss": 0.7067718505859375, |
|
"eval_runtime": 12.9881, |
|
"eval_samples_per_second": 506.925, |
|
"eval_steps_per_second": 7.93, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.23746701846965698, |
|
"eval_loss": 0.7053564786911011, |
|
"eval_runtime": 13.1243, |
|
"eval_samples_per_second": 501.665, |
|
"eval_steps_per_second": 7.848, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.2386142021337616, |
|
"eval_loss": 0.7037129402160645, |
|
"eval_runtime": 12.9868, |
|
"eval_samples_per_second": 506.975, |
|
"eval_steps_per_second": 7.931, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.23976138579786624, |
|
"eval_loss": 0.7023361921310425, |
|
"eval_runtime": 12.9909, |
|
"eval_samples_per_second": 506.818, |
|
"eval_steps_per_second": 7.929, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.24090856946197087, |
|
"grad_norm": 2.9726195335388184, |
|
"learning_rate": 8.027522935779817e-06, |
|
"loss": 1.1912, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.24090856946197087, |
|
"eval_loss": 0.7009308338165283, |
|
"eval_runtime": 13.1789, |
|
"eval_samples_per_second": 499.587, |
|
"eval_steps_per_second": 7.816, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.2420557531260755, |
|
"eval_loss": 0.6990575790405273, |
|
"eval_runtime": 13.0982, |
|
"eval_samples_per_second": 502.665, |
|
"eval_steps_per_second": 7.864, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.2432029367901801, |
|
"eval_loss": 0.6973636746406555, |
|
"eval_runtime": 13.0024, |
|
"eval_samples_per_second": 506.369, |
|
"eval_steps_per_second": 7.922, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.24435012045428472, |
|
"eval_loss": 0.6962077617645264, |
|
"eval_runtime": 12.9266, |
|
"eval_samples_per_second": 509.337, |
|
"eval_steps_per_second": 7.968, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.24549730411838935, |
|
"eval_loss": 0.6949887275695801, |
|
"eval_runtime": 12.9495, |
|
"eval_samples_per_second": 508.438, |
|
"eval_steps_per_second": 7.954, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.24664448778249398, |
|
"eval_loss": 0.6937941312789917, |
|
"eval_runtime": 12.9625, |
|
"eval_samples_per_second": 507.926, |
|
"eval_steps_per_second": 7.946, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.2477916714465986, |
|
"eval_loss": 0.6922134160995483, |
|
"eval_runtime": 13.0384, |
|
"eval_samples_per_second": 504.971, |
|
"eval_steps_per_second": 7.9, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.2489388551107032, |
|
"eval_loss": 0.6908959150314331, |
|
"eval_runtime": 13.0162, |
|
"eval_samples_per_second": 505.831, |
|
"eval_steps_per_second": 7.913, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.25008603877480784, |
|
"eval_loss": 0.6896905899047852, |
|
"eval_runtime": 13.0281, |
|
"eval_samples_per_second": 505.368, |
|
"eval_steps_per_second": 7.906, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.2512332224389125, |
|
"eval_loss": 0.6883853673934937, |
|
"eval_runtime": 13.059, |
|
"eval_samples_per_second": 504.173, |
|
"eval_steps_per_second": 7.887, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.2523804061030171, |
|
"grad_norm": 9.038525581359863, |
|
"learning_rate": 8.409785932721713e-06, |
|
"loss": 1.2144, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.2523804061030171, |
|
"eval_loss": 0.68684321641922, |
|
"eval_runtime": 12.9342, |
|
"eval_samples_per_second": 509.039, |
|
"eval_steps_per_second": 7.963, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.2535275897671217, |
|
"eval_loss": 0.6855539679527283, |
|
"eval_runtime": 12.947, |
|
"eval_samples_per_second": 508.536, |
|
"eval_steps_per_second": 7.956, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.25467477343122635, |
|
"eval_loss": 0.684305727481842, |
|
"eval_runtime": 13.0482, |
|
"eval_samples_per_second": 504.59, |
|
"eval_steps_per_second": 7.894, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.25582195709533095, |
|
"eval_loss": 0.6828807592391968, |
|
"eval_runtime": 13.0317, |
|
"eval_samples_per_second": 505.23, |
|
"eval_steps_per_second": 7.904, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.2569691407594356, |
|
"eval_loss": 0.6817071437835693, |
|
"eval_runtime": 13.0489, |
|
"eval_samples_per_second": 504.564, |
|
"eval_steps_per_second": 7.893, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.2581163244235402, |
|
"eval_loss": 0.6803813576698303, |
|
"eval_runtime": 13.0166, |
|
"eval_samples_per_second": 505.817, |
|
"eval_steps_per_second": 7.913, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.2592635080876448, |
|
"eval_loss": 0.6788561344146729, |
|
"eval_runtime": 13.018, |
|
"eval_samples_per_second": 505.76, |
|
"eval_steps_per_second": 7.912, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.26041069175174947, |
|
"eval_loss": 0.6775069832801819, |
|
"eval_runtime": 12.9958, |
|
"eval_samples_per_second": 506.623, |
|
"eval_steps_per_second": 7.926, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.26155787541585407, |
|
"eval_loss": 0.6762964725494385, |
|
"eval_runtime": 12.9456, |
|
"eval_samples_per_second": 508.59, |
|
"eval_steps_per_second": 7.956, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.2627050590799587, |
|
"eval_loss": 0.6750586032867432, |
|
"eval_runtime": 12.9511, |
|
"eval_samples_per_second": 508.372, |
|
"eval_steps_per_second": 7.953, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.2638522427440633, |
|
"grad_norm": 3.329761266708374, |
|
"learning_rate": 8.79204892966361e-06, |
|
"loss": 1.1498, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.2638522427440633, |
|
"eval_loss": 0.6738935112953186, |
|
"eval_runtime": 13.0495, |
|
"eval_samples_per_second": 504.542, |
|
"eval_steps_per_second": 7.893, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.2649994264081679, |
|
"eval_loss": 0.6725099682807922, |
|
"eval_runtime": 13.0328, |
|
"eval_samples_per_second": 505.188, |
|
"eval_steps_per_second": 7.903, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.2661466100722726, |
|
"eval_loss": 0.6710599660873413, |
|
"eval_runtime": 13.0505, |
|
"eval_samples_per_second": 504.502, |
|
"eval_steps_per_second": 7.892, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.2672937937363772, |
|
"eval_loss": 0.6697894930839539, |
|
"eval_runtime": 13.0616, |
|
"eval_samples_per_second": 504.074, |
|
"eval_steps_per_second": 7.886, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.26844097740048184, |
|
"eval_loss": 0.668381929397583, |
|
"eval_runtime": 12.9197, |
|
"eval_samples_per_second": 509.608, |
|
"eval_steps_per_second": 7.972, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.26958816106458644, |
|
"eval_loss": 0.6666426062583923, |
|
"eval_runtime": 13.0254, |
|
"eval_samples_per_second": 505.475, |
|
"eval_steps_per_second": 7.908, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.27073534472869104, |
|
"eval_loss": 0.6652824878692627, |
|
"eval_runtime": 13.0301, |
|
"eval_samples_per_second": 505.293, |
|
"eval_steps_per_second": 7.905, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.2718825283927957, |
|
"eval_loss": 0.6638170480728149, |
|
"eval_runtime": 12.917, |
|
"eval_samples_per_second": 509.715, |
|
"eval_steps_per_second": 7.974, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.2730297120569003, |
|
"eval_loss": 0.6620729565620422, |
|
"eval_runtime": 13.0292, |
|
"eval_samples_per_second": 505.327, |
|
"eval_steps_per_second": 7.905, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.27417689572100495, |
|
"eval_loss": 0.660891056060791, |
|
"eval_runtime": 13.022, |
|
"eval_samples_per_second": 505.604, |
|
"eval_steps_per_second": 7.91, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.27532407938510955, |
|
"grad_norm": 2.91483211517334, |
|
"learning_rate": 9.174311926605506e-06, |
|
"loss": 1.1446, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.27532407938510955, |
|
"eval_loss": 0.6595732569694519, |
|
"eval_runtime": 13.036, |
|
"eval_samples_per_second": 505.063, |
|
"eval_steps_per_second": 7.901, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.27647126304921416, |
|
"eval_loss": 0.6582459807395935, |
|
"eval_runtime": 12.9309, |
|
"eval_samples_per_second": 509.168, |
|
"eval_steps_per_second": 7.965, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.2776184467133188, |
|
"eval_loss": 0.6568032503128052, |
|
"eval_runtime": 13.0039, |
|
"eval_samples_per_second": 506.309, |
|
"eval_steps_per_second": 7.921, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.2787656303774234, |
|
"eval_loss": 0.6553359031677246, |
|
"eval_runtime": 12.9157, |
|
"eval_samples_per_second": 509.766, |
|
"eval_steps_per_second": 7.975, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.27991281404152807, |
|
"eval_loss": 0.6541372537612915, |
|
"eval_runtime": 12.9901, |
|
"eval_samples_per_second": 506.846, |
|
"eval_steps_per_second": 7.929, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.28105999770563267, |
|
"eval_loss": 0.6527132391929626, |
|
"eval_runtime": 12.9209, |
|
"eval_samples_per_second": 509.562, |
|
"eval_steps_per_second": 7.972, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.28220718136973727, |
|
"eval_loss": 0.6512923836708069, |
|
"eval_runtime": 12.9637, |
|
"eval_samples_per_second": 507.879, |
|
"eval_steps_per_second": 7.945, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.2833543650338419, |
|
"eval_loss": 0.6496260762214661, |
|
"eval_runtime": 13.0604, |
|
"eval_samples_per_second": 504.119, |
|
"eval_steps_per_second": 7.886, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.2845015486979465, |
|
"eval_loss": 0.6483332514762878, |
|
"eval_runtime": 13.0233, |
|
"eval_samples_per_second": 505.557, |
|
"eval_steps_per_second": 7.909, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.2856487323620512, |
|
"eval_loss": 0.647476077079773, |
|
"eval_runtime": 13.0344, |
|
"eval_samples_per_second": 505.124, |
|
"eval_steps_per_second": 7.902, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.2867959160261558, |
|
"grad_norm": 3.6412861347198486, |
|
"learning_rate": 9.556574923547402e-06, |
|
"loss": 1.1309, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.2867959160261558, |
|
"eval_loss": 0.6465412378311157, |
|
"eval_runtime": 12.9143, |
|
"eval_samples_per_second": 509.824, |
|
"eval_steps_per_second": 7.976, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.2879430996902604, |
|
"eval_loss": 0.6455466747283936, |
|
"eval_runtime": 13.0269, |
|
"eval_samples_per_second": 505.417, |
|
"eval_steps_per_second": 7.907, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.28909028335436504, |
|
"eval_loss": 0.6446804404258728, |
|
"eval_runtime": 13.0271, |
|
"eval_samples_per_second": 505.407, |
|
"eval_steps_per_second": 7.907, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.29023746701846964, |
|
"eval_loss": 0.6436929702758789, |
|
"eval_runtime": 13.0149, |
|
"eval_samples_per_second": 505.882, |
|
"eval_steps_per_second": 7.914, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.2913846506825743, |
|
"eval_loss": 0.6427727341651917, |
|
"eval_runtime": 13.0187, |
|
"eval_samples_per_second": 505.734, |
|
"eval_steps_per_second": 7.912, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.2925318343466789, |
|
"eval_loss": 0.6415053009986877, |
|
"eval_runtime": 13.0192, |
|
"eval_samples_per_second": 505.716, |
|
"eval_steps_per_second": 7.911, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.2936790180107835, |
|
"eval_loss": 0.6403249502182007, |
|
"eval_runtime": 13.0143, |
|
"eval_samples_per_second": 505.905, |
|
"eval_steps_per_second": 7.914, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.29482620167488816, |
|
"eval_loss": 0.639183759689331, |
|
"eval_runtime": 13.0127, |
|
"eval_samples_per_second": 505.966, |
|
"eval_steps_per_second": 7.915, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.29597338533899276, |
|
"eval_loss": 0.6380952000617981, |
|
"eval_runtime": 13.0253, |
|
"eval_samples_per_second": 505.478, |
|
"eval_steps_per_second": 7.908, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.2971205690030974, |
|
"eval_loss": 0.6371238231658936, |
|
"eval_runtime": 12.9266, |
|
"eval_samples_per_second": 509.339, |
|
"eval_steps_per_second": 7.968, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.298267752667202, |
|
"grad_norm": 3.0183558464050293, |
|
"learning_rate": 9.938837920489298e-06, |
|
"loss": 1.1006, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.298267752667202, |
|
"eval_loss": 0.6357853412628174, |
|
"eval_runtime": 12.9136, |
|
"eval_samples_per_second": 509.848, |
|
"eval_steps_per_second": 7.976, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.29941493633130667, |
|
"eval_loss": 0.6347528696060181, |
|
"eval_runtime": 13.031, |
|
"eval_samples_per_second": 505.258, |
|
"eval_steps_per_second": 7.904, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.30056211999541127, |
|
"eval_loss": 0.634018063545227, |
|
"eval_runtime": 13.0415, |
|
"eval_samples_per_second": 504.85, |
|
"eval_steps_per_second": 7.898, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.30170930365951587, |
|
"eval_loss": 0.6330114006996155, |
|
"eval_runtime": 12.9387, |
|
"eval_samples_per_second": 508.863, |
|
"eval_steps_per_second": 7.961, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.3028564873236205, |
|
"eval_loss": 0.6319145560264587, |
|
"eval_runtime": 12.919, |
|
"eval_samples_per_second": 509.636, |
|
"eval_steps_per_second": 7.973, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.3040036709877251, |
|
"eval_loss": 0.6307594180107117, |
|
"eval_runtime": 12.9268, |
|
"eval_samples_per_second": 509.328, |
|
"eval_steps_per_second": 7.968, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.3051508546518298, |
|
"eval_loss": 0.6300274133682251, |
|
"eval_runtime": 13.0, |
|
"eval_samples_per_second": 506.461, |
|
"eval_steps_per_second": 7.923, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.3062980383159344, |
|
"eval_loss": 0.6291391849517822, |
|
"eval_runtime": 13.023, |
|
"eval_samples_per_second": 505.566, |
|
"eval_steps_per_second": 7.909, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.307445221980039, |
|
"eval_loss": 0.6279686093330383, |
|
"eval_runtime": 12.9099, |
|
"eval_samples_per_second": 509.995, |
|
"eval_steps_per_second": 7.978, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.30859240564414364, |
|
"eval_loss": 0.6267827153205872, |
|
"eval_runtime": 12.9942, |
|
"eval_samples_per_second": 506.687, |
|
"eval_steps_per_second": 7.927, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.30973958930824824, |
|
"grad_norm": 2.923506021499634, |
|
"learning_rate": 9.964308476736776e-06, |
|
"loss": 1.0772, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.30973958930824824, |
|
"eval_loss": 0.6254075765609741, |
|
"eval_runtime": 13.0366, |
|
"eval_samples_per_second": 505.041, |
|
"eval_steps_per_second": 7.901, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.3108867729723529, |
|
"eval_loss": 0.6242946982383728, |
|
"eval_runtime": 13.0323, |
|
"eval_samples_per_second": 505.207, |
|
"eval_steps_per_second": 7.903, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.3120339566364575, |
|
"eval_loss": 0.6231787204742432, |
|
"eval_runtime": 12.9273, |
|
"eval_samples_per_second": 509.309, |
|
"eval_steps_per_second": 7.968, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.3131811403005621, |
|
"eval_loss": 0.62239009141922, |
|
"eval_runtime": 12.9153, |
|
"eval_samples_per_second": 509.781, |
|
"eval_steps_per_second": 7.975, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.31432832396466676, |
|
"eval_loss": 0.6214902400970459, |
|
"eval_runtime": 12.9178, |
|
"eval_samples_per_second": 509.684, |
|
"eval_steps_per_second": 7.973, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.31547550762877136, |
|
"eval_loss": 0.6204728484153748, |
|
"eval_runtime": 12.9141, |
|
"eval_samples_per_second": 509.831, |
|
"eval_steps_per_second": 7.976, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.316622691292876, |
|
"eval_loss": 0.6194381713867188, |
|
"eval_runtime": 12.9151, |
|
"eval_samples_per_second": 509.791, |
|
"eval_steps_per_second": 7.975, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.3177698749569806, |
|
"eval_loss": 0.6183201670646667, |
|
"eval_runtime": 12.9127, |
|
"eval_samples_per_second": 509.887, |
|
"eval_steps_per_second": 7.977, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.3189170586210852, |
|
"eval_loss": 0.6171374917030334, |
|
"eval_runtime": 12.9112, |
|
"eval_samples_per_second": 509.944, |
|
"eval_steps_per_second": 7.978, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.32006424228518987, |
|
"eval_loss": 0.6160247921943665, |
|
"eval_runtime": 13.0361, |
|
"eval_samples_per_second": 505.06, |
|
"eval_steps_per_second": 7.901, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.32121142594929447, |
|
"grad_norm": 2.8031253814697266, |
|
"learning_rate": 9.92181856809008e-06, |
|
"loss": 1.0648, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.32121142594929447, |
|
"eval_loss": 0.6153244376182556, |
|
"eval_runtime": 13.0215, |
|
"eval_samples_per_second": 505.624, |
|
"eval_steps_per_second": 7.91, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.3223586096133991, |
|
"eval_loss": 0.6140704154968262, |
|
"eval_runtime": 13.0201, |
|
"eval_samples_per_second": 505.679, |
|
"eval_steps_per_second": 7.911, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.32350579327750373, |
|
"eval_loss": 0.6129491329193115, |
|
"eval_runtime": 13.0287, |
|
"eval_samples_per_second": 505.347, |
|
"eval_steps_per_second": 7.906, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.32465297694160833, |
|
"eval_loss": 0.6119452118873596, |
|
"eval_runtime": 13.045, |
|
"eval_samples_per_second": 504.716, |
|
"eval_steps_per_second": 7.896, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.325800160605713, |
|
"eval_loss": 0.6109396815299988, |
|
"eval_runtime": 13.0301, |
|
"eval_samples_per_second": 505.29, |
|
"eval_steps_per_second": 7.905, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.3269473442698176, |
|
"eval_loss": 0.6099222898483276, |
|
"eval_runtime": 13.0261, |
|
"eval_samples_per_second": 505.446, |
|
"eval_steps_per_second": 7.907, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.32809452793392224, |
|
"eval_loss": 0.6087589859962463, |
|
"eval_runtime": 12.9193, |
|
"eval_samples_per_second": 509.625, |
|
"eval_steps_per_second": 7.973, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.32924171159802684, |
|
"eval_loss": 0.6078950762748718, |
|
"eval_runtime": 12.9854, |
|
"eval_samples_per_second": 507.031, |
|
"eval_steps_per_second": 7.932, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.33038889526213144, |
|
"eval_loss": 0.60728919506073, |
|
"eval_runtime": 13.0282, |
|
"eval_samples_per_second": 505.366, |
|
"eval_steps_per_second": 7.906, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.3315360789262361, |
|
"eval_loss": 0.606285810470581, |
|
"eval_runtime": 13.0385, |
|
"eval_samples_per_second": 504.964, |
|
"eval_steps_per_second": 7.9, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 0.3326832625903407, |
|
"grad_norm": 3.0763423442840576, |
|
"learning_rate": 9.879328659443383e-06, |
|
"loss": 1.0398, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.3326832625903407, |
|
"eval_loss": 0.6053714752197266, |
|
"eval_runtime": 13.0412, |
|
"eval_samples_per_second": 504.863, |
|
"eval_steps_per_second": 7.898, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.33383044625444536, |
|
"eval_loss": 0.6043825149536133, |
|
"eval_runtime": 13.0358, |
|
"eval_samples_per_second": 505.072, |
|
"eval_steps_per_second": 7.901, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.33497762991854996, |
|
"eval_loss": 0.6032926440238953, |
|
"eval_runtime": 13.0011, |
|
"eval_samples_per_second": 506.418, |
|
"eval_steps_per_second": 7.922, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 0.33612481358265456, |
|
"eval_loss": 0.6022117137908936, |
|
"eval_runtime": 12.922, |
|
"eval_samples_per_second": 509.517, |
|
"eval_steps_per_second": 7.971, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 0.3372719972467592, |
|
"eval_loss": 0.6011677980422974, |
|
"eval_runtime": 12.9117, |
|
"eval_samples_per_second": 509.924, |
|
"eval_steps_per_second": 7.977, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.3384191809108638, |
|
"eval_loss": 0.6003371477127075, |
|
"eval_runtime": 13.0239, |
|
"eval_samples_per_second": 505.531, |
|
"eval_steps_per_second": 7.909, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.33956636457496847, |
|
"eval_loss": 0.5993051528930664, |
|
"eval_runtime": 12.9359, |
|
"eval_samples_per_second": 508.971, |
|
"eval_steps_per_second": 7.962, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 0.34071354823907307, |
|
"eval_loss": 0.5986045598983765, |
|
"eval_runtime": 12.9502, |
|
"eval_samples_per_second": 508.409, |
|
"eval_steps_per_second": 7.954, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.3418607319031777, |
|
"eval_loss": 0.5977791547775269, |
|
"eval_runtime": 13.0367, |
|
"eval_samples_per_second": 505.037, |
|
"eval_steps_per_second": 7.901, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 0.34300791556728233, |
|
"eval_loss": 0.596732497215271, |
|
"eval_runtime": 13.0228, |
|
"eval_samples_per_second": 505.576, |
|
"eval_steps_per_second": 7.909, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 0.34415509923138693, |
|
"grad_norm": 2.941866636276245, |
|
"learning_rate": 9.836838750796687e-06, |
|
"loss": 1.0256, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.34415509923138693, |
|
"eval_loss": 0.5959278345108032, |
|
"eval_runtime": 12.9239, |
|
"eval_samples_per_second": 509.443, |
|
"eval_steps_per_second": 7.97, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.3453022828954916, |
|
"eval_loss": 0.5946770906448364, |
|
"eval_runtime": 12.9123, |
|
"eval_samples_per_second": 509.903, |
|
"eval_steps_per_second": 7.977, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 0.3464494665595962, |
|
"eval_loss": 0.593720555305481, |
|
"eval_runtime": 12.92, |
|
"eval_samples_per_second": 509.596, |
|
"eval_steps_per_second": 7.972, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 0.3475966502237008, |
|
"eval_loss": 0.5929400324821472, |
|
"eval_runtime": 12.9184, |
|
"eval_samples_per_second": 509.661, |
|
"eval_steps_per_second": 7.973, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.34874383388780544, |
|
"eval_loss": 0.5920357704162598, |
|
"eval_runtime": 12.9232, |
|
"eval_samples_per_second": 509.47, |
|
"eval_steps_per_second": 7.97, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 0.34989101755191004, |
|
"eval_loss": 0.5908279418945312, |
|
"eval_runtime": 12.9197, |
|
"eval_samples_per_second": 509.609, |
|
"eval_steps_per_second": 7.972, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.3510382012160147, |
|
"eval_loss": 0.5896555185317993, |
|
"eval_runtime": 13.0254, |
|
"eval_samples_per_second": 505.473, |
|
"eval_steps_per_second": 7.908, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.3521853848801193, |
|
"eval_loss": 0.5887726545333862, |
|
"eval_runtime": 13.0242, |
|
"eval_samples_per_second": 505.522, |
|
"eval_steps_per_second": 7.908, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 0.35333256854422396, |
|
"eval_loss": 0.5881541967391968, |
|
"eval_runtime": 13.0358, |
|
"eval_samples_per_second": 505.069, |
|
"eval_steps_per_second": 7.901, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 0.35447975220832856, |
|
"eval_loss": 0.5873861312866211, |
|
"eval_runtime": 12.9582, |
|
"eval_samples_per_second": 508.095, |
|
"eval_steps_per_second": 7.949, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.35562693587243316, |
|
"grad_norm": 3.1805124282836914, |
|
"learning_rate": 9.79434884214999e-06, |
|
"loss": 1.0489, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.35562693587243316, |
|
"eval_loss": 0.5868309736251831, |
|
"eval_runtime": 13.0375, |
|
"eval_samples_per_second": 505.003, |
|
"eval_steps_per_second": 7.9, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.3567741195365378, |
|
"eval_loss": 0.5860410928726196, |
|
"eval_runtime": 12.9224, |
|
"eval_samples_per_second": 509.501, |
|
"eval_steps_per_second": 7.971, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 0.3579213032006424, |
|
"eval_loss": 0.5853646993637085, |
|
"eval_runtime": 12.9171, |
|
"eval_samples_per_second": 509.711, |
|
"eval_steps_per_second": 7.974, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 0.35906848686474707, |
|
"eval_loss": 0.5839424729347229, |
|
"eval_runtime": 12.9136, |
|
"eval_samples_per_second": 509.848, |
|
"eval_steps_per_second": 7.976, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 0.3602156705288517, |
|
"eval_loss": 0.5830027461051941, |
|
"eval_runtime": 12.9177, |
|
"eval_samples_per_second": 509.686, |
|
"eval_steps_per_second": 7.974, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 0.3613628541929563, |
|
"eval_loss": 0.5822171568870544, |
|
"eval_runtime": 13.0378, |
|
"eval_samples_per_second": 504.995, |
|
"eval_steps_per_second": 7.9, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.36251003785706093, |
|
"eval_loss": 0.5814335942268372, |
|
"eval_runtime": 12.9196, |
|
"eval_samples_per_second": 509.612, |
|
"eval_steps_per_second": 7.972, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 0.36365722152116553, |
|
"eval_loss": 0.5807969570159912, |
|
"eval_runtime": 12.9184, |
|
"eval_samples_per_second": 509.659, |
|
"eval_steps_per_second": 7.973, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 0.3648044051852702, |
|
"eval_loss": 0.5801600813865662, |
|
"eval_runtime": 12.9016, |
|
"eval_samples_per_second": 510.323, |
|
"eval_steps_per_second": 7.983, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 0.3659515888493748, |
|
"eval_loss": 0.5793916583061218, |
|
"eval_runtime": 12.9241, |
|
"eval_samples_per_second": 509.434, |
|
"eval_steps_per_second": 7.97, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 0.3670987725134794, |
|
"grad_norm": 3.2195188999176025, |
|
"learning_rate": 9.751858933503294e-06, |
|
"loss": 1.038, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.3670987725134794, |
|
"eval_loss": 0.578770637512207, |
|
"eval_runtime": 13.0163, |
|
"eval_samples_per_second": 505.828, |
|
"eval_steps_per_second": 7.913, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.36824595617758404, |
|
"eval_loss": 0.5778489112854004, |
|
"eval_runtime": 13.0447, |
|
"eval_samples_per_second": 504.726, |
|
"eval_steps_per_second": 7.896, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 0.36939313984168864, |
|
"eval_loss": 0.577012300491333, |
|
"eval_runtime": 13.038, |
|
"eval_samples_per_second": 504.984, |
|
"eval_steps_per_second": 7.9, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 0.3705403235057933, |
|
"eval_loss": 0.5763078331947327, |
|
"eval_runtime": 12.9775, |
|
"eval_samples_per_second": 507.339, |
|
"eval_steps_per_second": 7.937, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 0.3716875071698979, |
|
"eval_loss": 0.5751996040344238, |
|
"eval_runtime": 12.9227, |
|
"eval_samples_per_second": 509.492, |
|
"eval_steps_per_second": 7.97, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 0.3728346908340025, |
|
"eval_loss": 0.5744589567184448, |
|
"eval_runtime": 12.9251, |
|
"eval_samples_per_second": 509.398, |
|
"eval_steps_per_second": 7.969, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.37398187449810716, |
|
"eval_loss": 0.5737271308898926, |
|
"eval_runtime": 12.9481, |
|
"eval_samples_per_second": 508.493, |
|
"eval_steps_per_second": 7.955, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 0.37512905816221176, |
|
"eval_loss": 0.5727923512458801, |
|
"eval_runtime": 13.0412, |
|
"eval_samples_per_second": 504.861, |
|
"eval_steps_per_second": 7.898, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 0.3762762418263164, |
|
"eval_loss": 0.5719892382621765, |
|
"eval_runtime": 12.9946, |
|
"eval_samples_per_second": 506.674, |
|
"eval_steps_per_second": 7.926, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 0.377423425490421, |
|
"eval_loss": 0.5713133811950684, |
|
"eval_runtime": 12.9204, |
|
"eval_samples_per_second": 509.58, |
|
"eval_steps_per_second": 7.972, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 0.3785706091545256, |
|
"grad_norm": 2.951904058456421, |
|
"learning_rate": 9.709369024856598e-06, |
|
"loss": 1.0058, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.3785706091545256, |
|
"eval_loss": 0.5707195401191711, |
|
"eval_runtime": 12.9678, |
|
"eval_samples_per_second": 507.718, |
|
"eval_steps_per_second": 7.943, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.3797177928186303, |
|
"eval_loss": 0.5699546933174133, |
|
"eval_runtime": 12.9635, |
|
"eval_samples_per_second": 507.889, |
|
"eval_steps_per_second": 7.945, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 0.3808649764827349, |
|
"eval_loss": 0.5689657926559448, |
|
"eval_runtime": 12.9065, |
|
"eval_samples_per_second": 510.131, |
|
"eval_steps_per_second": 7.98, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 0.38201216014683953, |
|
"eval_loss": 0.5680745244026184, |
|
"eval_runtime": 12.9206, |
|
"eval_samples_per_second": 509.574, |
|
"eval_steps_per_second": 7.972, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 0.38315934381094413, |
|
"eval_loss": 0.5672716498374939, |
|
"eval_runtime": 12.9202, |
|
"eval_samples_per_second": 509.591, |
|
"eval_steps_per_second": 7.972, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 0.38430652747504873, |
|
"eval_loss": 0.5669021606445312, |
|
"eval_runtime": 12.9282, |
|
"eval_samples_per_second": 509.275, |
|
"eval_steps_per_second": 7.967, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.3854537111391534, |
|
"eval_loss": 0.5667062997817993, |
|
"eval_runtime": 12.9165, |
|
"eval_samples_per_second": 509.737, |
|
"eval_steps_per_second": 7.974, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 0.386600894803258, |
|
"eval_loss": 0.5664732456207275, |
|
"eval_runtime": 12.9163, |
|
"eval_samples_per_second": 509.742, |
|
"eval_steps_per_second": 7.974, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 0.38774807846736264, |
|
"eval_loss": 0.5658523440361023, |
|
"eval_runtime": 12.9214, |
|
"eval_samples_per_second": 509.542, |
|
"eval_steps_per_second": 7.971, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 0.38889526213146725, |
|
"eval_loss": 0.5650349855422974, |
|
"eval_runtime": 12.9209, |
|
"eval_samples_per_second": 509.562, |
|
"eval_steps_per_second": 7.972, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 0.39004244579557185, |
|
"grad_norm": 2.8982646465301514, |
|
"learning_rate": 9.666879116209901e-06, |
|
"loss": 1.0413, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.39004244579557185, |
|
"eval_loss": 0.5644938349723816, |
|
"eval_runtime": 12.9147, |
|
"eval_samples_per_second": 509.808, |
|
"eval_steps_per_second": 7.975, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.3911896294596765, |
|
"eval_loss": 0.5641273260116577, |
|
"eval_runtime": 12.9416, |
|
"eval_samples_per_second": 508.745, |
|
"eval_steps_per_second": 7.959, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.3923368131237811, |
|
"eval_loss": 0.5635139346122742, |
|
"eval_runtime": 12.9481, |
|
"eval_samples_per_second": 508.491, |
|
"eval_steps_per_second": 7.955, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 0.39348399678788576, |
|
"eval_loss": 0.5628678202629089, |
|
"eval_runtime": 12.9965, |
|
"eval_samples_per_second": 506.596, |
|
"eval_steps_per_second": 7.925, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 0.39463118045199036, |
|
"eval_loss": 0.5622407793998718, |
|
"eval_runtime": 12.9926, |
|
"eval_samples_per_second": 506.748, |
|
"eval_steps_per_second": 7.928, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 0.39577836411609496, |
|
"eval_loss": 0.5617032051086426, |
|
"eval_runtime": 12.92, |
|
"eval_samples_per_second": 509.598, |
|
"eval_steps_per_second": 7.972, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.3969255477801996, |
|
"eval_loss": 0.5613731741905212, |
|
"eval_runtime": 12.9152, |
|
"eval_samples_per_second": 509.786, |
|
"eval_steps_per_second": 7.975, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 0.3980727314443042, |
|
"eval_loss": 0.5606963038444519, |
|
"eval_runtime": 12.9194, |
|
"eval_samples_per_second": 509.62, |
|
"eval_steps_per_second": 7.972, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 0.3992199151084089, |
|
"eval_loss": 0.5602635145187378, |
|
"eval_runtime": 12.9242, |
|
"eval_samples_per_second": 509.432, |
|
"eval_steps_per_second": 7.97, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 0.4003670987725135, |
|
"eval_loss": 0.5598347783088684, |
|
"eval_runtime": 12.9579, |
|
"eval_samples_per_second": 508.107, |
|
"eval_steps_per_second": 7.949, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 0.4015142824366181, |
|
"grad_norm": 3.217618465423584, |
|
"learning_rate": 9.624389207563205e-06, |
|
"loss": 0.938, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.4015142824366181, |
|
"eval_loss": 0.5596053600311279, |
|
"eval_runtime": 13.0466, |
|
"eval_samples_per_second": 504.652, |
|
"eval_steps_per_second": 7.895, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.40266146610072273, |
|
"eval_loss": 0.5589408874511719, |
|
"eval_runtime": 13.0715, |
|
"eval_samples_per_second": 503.69, |
|
"eval_steps_per_second": 7.88, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 0.40380864976482733, |
|
"eval_loss": 0.558090329170227, |
|
"eval_runtime": 12.9472, |
|
"eval_samples_per_second": 508.528, |
|
"eval_steps_per_second": 7.955, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 0.404955833428932, |
|
"eval_loss": 0.5570879578590393, |
|
"eval_runtime": 12.9169, |
|
"eval_samples_per_second": 509.72, |
|
"eval_steps_per_second": 7.974, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 0.4061030170930366, |
|
"eval_loss": 0.5562835931777954, |
|
"eval_runtime": 12.9892, |
|
"eval_samples_per_second": 506.881, |
|
"eval_steps_per_second": 7.93, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 0.40725020075714125, |
|
"eval_loss": 0.5557334423065186, |
|
"eval_runtime": 12.9142, |
|
"eval_samples_per_second": 509.826, |
|
"eval_steps_per_second": 7.976, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 0.40839738442124585, |
|
"eval_loss": 0.5551453828811646, |
|
"eval_runtime": 12.9967, |
|
"eval_samples_per_second": 506.59, |
|
"eval_steps_per_second": 7.925, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 0.40954456808535045, |
|
"eval_loss": 0.5546151995658875, |
|
"eval_runtime": 13.0283, |
|
"eval_samples_per_second": 505.362, |
|
"eval_steps_per_second": 7.906, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 0.4106917517494551, |
|
"eval_loss": 0.5540978312492371, |
|
"eval_runtime": 13.0306, |
|
"eval_samples_per_second": 505.273, |
|
"eval_steps_per_second": 7.904, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 0.4118389354135597, |
|
"eval_loss": 0.5535085797309875, |
|
"eval_runtime": 13.0453, |
|
"eval_samples_per_second": 504.704, |
|
"eval_steps_per_second": 7.896, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 0.41298611907766436, |
|
"grad_norm": 3.073883056640625, |
|
"learning_rate": 9.581899298916509e-06, |
|
"loss": 0.955, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.41298611907766436, |
|
"eval_loss": 0.5527840256690979, |
|
"eval_runtime": 13.0322, |
|
"eval_samples_per_second": 505.21, |
|
"eval_steps_per_second": 7.903, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.41413330274176896, |
|
"eval_loss": 0.5521849989891052, |
|
"eval_runtime": 13.0324, |
|
"eval_samples_per_second": 505.202, |
|
"eval_steps_per_second": 7.903, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 0.41528048640587356, |
|
"eval_loss": 0.5515934824943542, |
|
"eval_runtime": 13.0428, |
|
"eval_samples_per_second": 504.799, |
|
"eval_steps_per_second": 7.897, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 0.4164276700699782, |
|
"eval_loss": 0.5508650541305542, |
|
"eval_runtime": 13.0451, |
|
"eval_samples_per_second": 504.709, |
|
"eval_steps_per_second": 7.896, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 0.4175748537340828, |
|
"eval_loss": 0.5502671003341675, |
|
"eval_runtime": 13.0251, |
|
"eval_samples_per_second": 505.484, |
|
"eval_steps_per_second": 7.908, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 0.4187220373981875, |
|
"eval_loss": 0.5495434403419495, |
|
"eval_runtime": 13.0242, |
|
"eval_samples_per_second": 505.519, |
|
"eval_steps_per_second": 7.908, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 0.4198692210622921, |
|
"eval_loss": 0.5489790439605713, |
|
"eval_runtime": 13.0389, |
|
"eval_samples_per_second": 504.951, |
|
"eval_steps_per_second": 7.899, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 0.4210164047263967, |
|
"eval_loss": 0.5481202006340027, |
|
"eval_runtime": 12.9193, |
|
"eval_samples_per_second": 509.627, |
|
"eval_steps_per_second": 7.973, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 0.42216358839050133, |
|
"eval_loss": 0.5474987030029297, |
|
"eval_runtime": 13.0455, |
|
"eval_samples_per_second": 504.697, |
|
"eval_steps_per_second": 7.895, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 0.42331077205460593, |
|
"eval_loss": 0.5467217564582825, |
|
"eval_runtime": 12.9516, |
|
"eval_samples_per_second": 508.356, |
|
"eval_steps_per_second": 7.953, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 0.4244579557187106, |
|
"grad_norm": 4.54811954498291, |
|
"learning_rate": 9.539409390269812e-06, |
|
"loss": 0.9387, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.4244579557187106, |
|
"eval_loss": 0.5463354587554932, |
|
"eval_runtime": 12.949, |
|
"eval_samples_per_second": 508.456, |
|
"eval_steps_per_second": 7.954, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.4256051393828152, |
|
"eval_loss": 0.5458938479423523, |
|
"eval_runtime": 12.9107, |
|
"eval_samples_per_second": 509.966, |
|
"eval_steps_per_second": 7.978, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 0.4267523230469198, |
|
"eval_loss": 0.5451832413673401, |
|
"eval_runtime": 13.0127, |
|
"eval_samples_per_second": 505.968, |
|
"eval_steps_per_second": 7.915, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.42789950671102445, |
|
"eval_loss": 0.5448161363601685, |
|
"eval_runtime": 13.033, |
|
"eval_samples_per_second": 505.18, |
|
"eval_steps_per_second": 7.903, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 0.42904669037512905, |
|
"eval_loss": 0.5443439483642578, |
|
"eval_runtime": 13.0146, |
|
"eval_samples_per_second": 505.895, |
|
"eval_steps_per_second": 7.914, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 0.4301938740392337, |
|
"eval_loss": 0.544032633304596, |
|
"eval_runtime": 12.9589, |
|
"eval_samples_per_second": 508.069, |
|
"eval_steps_per_second": 7.948, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.4313410577033383, |
|
"eval_loss": 0.5435395836830139, |
|
"eval_runtime": 12.9098, |
|
"eval_samples_per_second": 510.001, |
|
"eval_steps_per_second": 7.978, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 0.4324882413674429, |
|
"eval_loss": 0.5429771542549133, |
|
"eval_runtime": 12.9366, |
|
"eval_samples_per_second": 508.944, |
|
"eval_steps_per_second": 7.962, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 0.43363542503154756, |
|
"eval_loss": 0.5423203706741333, |
|
"eval_runtime": 13.1085, |
|
"eval_samples_per_second": 502.268, |
|
"eval_steps_per_second": 7.857, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 0.43478260869565216, |
|
"eval_loss": 0.5417954921722412, |
|
"eval_runtime": 13.0184, |
|
"eval_samples_per_second": 505.747, |
|
"eval_steps_per_second": 7.912, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 0.4359297923597568, |
|
"grad_norm": 6.273746967315674, |
|
"learning_rate": 9.496919481623116e-06, |
|
"loss": 0.9672, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.4359297923597568, |
|
"eval_loss": 0.5414963364601135, |
|
"eval_runtime": 13.0358, |
|
"eval_samples_per_second": 505.07, |
|
"eval_steps_per_second": 7.901, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.4370769760238614, |
|
"eval_loss": 0.541257381439209, |
|
"eval_runtime": 12.9726, |
|
"eval_samples_per_second": 507.53, |
|
"eval_steps_per_second": 7.94, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 0.438224159687966, |
|
"eval_loss": 0.5409945845603943, |
|
"eval_runtime": 13.0274, |
|
"eval_samples_per_second": 505.398, |
|
"eval_steps_per_second": 7.906, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 0.4393713433520707, |
|
"eval_loss": 0.5406415462493896, |
|
"eval_runtime": 13.0472, |
|
"eval_samples_per_second": 504.629, |
|
"eval_steps_per_second": 7.894, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 0.4405185270161753, |
|
"eval_loss": 0.5403361916542053, |
|
"eval_runtime": 12.9682, |
|
"eval_samples_per_second": 507.704, |
|
"eval_steps_per_second": 7.943, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 0.44166571068027993, |
|
"eval_loss": 0.5397402048110962, |
|
"eval_runtime": 13.0503, |
|
"eval_samples_per_second": 504.509, |
|
"eval_steps_per_second": 7.893, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 0.44281289434438453, |
|
"eval_loss": 0.5394212007522583, |
|
"eval_runtime": 12.9962, |
|
"eval_samples_per_second": 506.611, |
|
"eval_steps_per_second": 7.925, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 0.44396007800848913, |
|
"eval_loss": 0.538590669631958, |
|
"eval_runtime": 13.0433, |
|
"eval_samples_per_second": 504.78, |
|
"eval_steps_per_second": 7.897, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 0.4451072616725938, |
|
"eval_loss": 0.537807822227478, |
|
"eval_runtime": 12.9414, |
|
"eval_samples_per_second": 508.756, |
|
"eval_steps_per_second": 7.959, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 0.4462544453366984, |
|
"eval_loss": 0.536992073059082, |
|
"eval_runtime": 12.9902, |
|
"eval_samples_per_second": 506.844, |
|
"eval_steps_per_second": 7.929, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 0.44740162900080305, |
|
"grad_norm": 2.6543989181518555, |
|
"learning_rate": 9.45442957297642e-06, |
|
"loss": 0.926, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.44740162900080305, |
|
"eval_loss": 0.5359557867050171, |
|
"eval_runtime": 13.0495, |
|
"eval_samples_per_second": 504.542, |
|
"eval_steps_per_second": 7.893, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.44854881266490765, |
|
"eval_loss": 0.5351366400718689, |
|
"eval_runtime": 12.923, |
|
"eval_samples_per_second": 509.48, |
|
"eval_steps_per_second": 7.97, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 0.44969599632901225, |
|
"eval_loss": 0.5345980525016785, |
|
"eval_runtime": 12.9402, |
|
"eval_samples_per_second": 508.803, |
|
"eval_steps_per_second": 7.96, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 0.4508431799931169, |
|
"eval_loss": 0.5343027710914612, |
|
"eval_runtime": 13.0184, |
|
"eval_samples_per_second": 505.747, |
|
"eval_steps_per_second": 7.912, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 0.4519903636572215, |
|
"eval_loss": 0.5339150428771973, |
|
"eval_runtime": 13.0383, |
|
"eval_samples_per_second": 504.974, |
|
"eval_steps_per_second": 7.9, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 0.45313754732132616, |
|
"eval_loss": 0.533658504486084, |
|
"eval_runtime": 13.0155, |
|
"eval_samples_per_second": 505.857, |
|
"eval_steps_per_second": 7.914, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.45428473098543076, |
|
"eval_loss": 0.5334100127220154, |
|
"eval_runtime": 12.9238, |
|
"eval_samples_per_second": 509.45, |
|
"eval_steps_per_second": 7.97, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 0.45543191464953536, |
|
"eval_loss": 0.5330451726913452, |
|
"eval_runtime": 12.9194, |
|
"eval_samples_per_second": 509.619, |
|
"eval_steps_per_second": 7.972, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 0.45657909831364, |
|
"eval_loss": 0.5327152609825134, |
|
"eval_runtime": 13.015, |
|
"eval_samples_per_second": 505.879, |
|
"eval_steps_per_second": 7.914, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 0.4577262819777446, |
|
"eval_loss": 0.5323871374130249, |
|
"eval_runtime": 13.021, |
|
"eval_samples_per_second": 505.646, |
|
"eval_steps_per_second": 7.91, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 0.4588734656418493, |
|
"grad_norm": 2.330146074295044, |
|
"learning_rate": 9.411939664329721e-06, |
|
"loss": 0.867, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.4588734656418493, |
|
"eval_loss": 0.5319190621376038, |
|
"eval_runtime": 13.0222, |
|
"eval_samples_per_second": 505.6, |
|
"eval_steps_per_second": 7.91, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.4600206493059539, |
|
"eval_loss": 0.5313332676887512, |
|
"eval_runtime": 13.0478, |
|
"eval_samples_per_second": 504.605, |
|
"eval_steps_per_second": 7.894, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 0.46116783297005853, |
|
"eval_loss": 0.5308486223220825, |
|
"eval_runtime": 12.964, |
|
"eval_samples_per_second": 507.868, |
|
"eval_steps_per_second": 7.945, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 0.46231501663416313, |
|
"eval_loss": 0.5300149917602539, |
|
"eval_runtime": 12.914, |
|
"eval_samples_per_second": 509.835, |
|
"eval_steps_per_second": 7.976, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 0.46346220029826773, |
|
"eval_loss": 0.5292573571205139, |
|
"eval_runtime": 12.9769, |
|
"eval_samples_per_second": 507.361, |
|
"eval_steps_per_second": 7.937, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 0.4646093839623724, |
|
"eval_loss": 0.5286610722541809, |
|
"eval_runtime": 13.0368, |
|
"eval_samples_per_second": 505.031, |
|
"eval_steps_per_second": 7.901, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 0.465756567626477, |
|
"eval_loss": 0.5283547043800354, |
|
"eval_runtime": 13.0356, |
|
"eval_samples_per_second": 505.077, |
|
"eval_steps_per_second": 7.901, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 0.46690375129058165, |
|
"eval_loss": 0.5280731916427612, |
|
"eval_runtime": 13.0048, |
|
"eval_samples_per_second": 506.275, |
|
"eval_steps_per_second": 7.92, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 0.46805093495468625, |
|
"eval_loss": 0.5276577472686768, |
|
"eval_runtime": 12.9174, |
|
"eval_samples_per_second": 509.699, |
|
"eval_steps_per_second": 7.974, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 0.46919811861879085, |
|
"eval_loss": 0.5272203087806702, |
|
"eval_runtime": 12.9134, |
|
"eval_samples_per_second": 509.857, |
|
"eval_steps_per_second": 7.976, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 0.4703453022828955, |
|
"grad_norm": 2.8037149906158447, |
|
"learning_rate": 9.369449755683025e-06, |
|
"loss": 0.916, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 0.4703453022828955, |
|
"eval_loss": 0.5267237424850464, |
|
"eval_runtime": 12.9151, |
|
"eval_samples_per_second": 509.79, |
|
"eval_steps_per_second": 7.975, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 0.4714924859470001, |
|
"eval_loss": 0.5260052680969238, |
|
"eval_runtime": 12.9148, |
|
"eval_samples_per_second": 509.801, |
|
"eval_steps_per_second": 7.975, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 0.47263966961110476, |
|
"eval_loss": 0.5252425074577332, |
|
"eval_runtime": 13.014, |
|
"eval_samples_per_second": 505.915, |
|
"eval_steps_per_second": 7.915, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 0.47378685327520936, |
|
"eval_loss": 0.5246235132217407, |
|
"eval_runtime": 13.0291, |
|
"eval_samples_per_second": 505.332, |
|
"eval_steps_per_second": 7.905, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 0.47493403693931396, |
|
"eval_loss": 0.5238820314407349, |
|
"eval_runtime": 12.9264, |
|
"eval_samples_per_second": 509.345, |
|
"eval_steps_per_second": 7.968, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 0.4760812206034186, |
|
"eval_loss": 0.5231978297233582, |
|
"eval_runtime": 12.9421, |
|
"eval_samples_per_second": 508.729, |
|
"eval_steps_per_second": 7.959, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 0.4772284042675232, |
|
"eval_loss": 0.5225175023078918, |
|
"eval_runtime": 13.0292, |
|
"eval_samples_per_second": 505.328, |
|
"eval_steps_per_second": 7.905, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 0.4783755879316279, |
|
"eval_loss": 0.522089958190918, |
|
"eval_runtime": 13.0462, |
|
"eval_samples_per_second": 504.666, |
|
"eval_steps_per_second": 7.895, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 0.4795227715957325, |
|
"eval_loss": 0.5216270685195923, |
|
"eval_runtime": 13.0261, |
|
"eval_samples_per_second": 505.448, |
|
"eval_steps_per_second": 7.907, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 0.4806699552598371, |
|
"eval_loss": 0.5211179256439209, |
|
"eval_runtime": 13.0229, |
|
"eval_samples_per_second": 505.569, |
|
"eval_steps_per_second": 7.909, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 0.48181713892394173, |
|
"grad_norm": 4.981523036956787, |
|
"learning_rate": 9.32695984703633e-06, |
|
"loss": 0.9667, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 0.48181713892394173, |
|
"eval_loss": 0.5206459760665894, |
|
"eval_runtime": 13.0189, |
|
"eval_samples_per_second": 505.728, |
|
"eval_steps_per_second": 7.912, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 0.48296432258804634, |
|
"eval_loss": 0.5203832983970642, |
|
"eval_runtime": 13.0372, |
|
"eval_samples_per_second": 505.016, |
|
"eval_steps_per_second": 7.9, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 0.484111506252151, |
|
"eval_loss": 0.5200340151786804, |
|
"eval_runtime": 12.921, |
|
"eval_samples_per_second": 509.558, |
|
"eval_steps_per_second": 7.972, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 0.4852586899162556, |
|
"eval_loss": 0.5191519856452942, |
|
"eval_runtime": 12.9412, |
|
"eval_samples_per_second": 508.761, |
|
"eval_steps_per_second": 7.959, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 0.4864058735803602, |
|
"eval_loss": 0.5187414884567261, |
|
"eval_runtime": 13.0244, |
|
"eval_samples_per_second": 505.513, |
|
"eval_steps_per_second": 7.908, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 0.48755305724446485, |
|
"eval_loss": 0.5184528827667236, |
|
"eval_runtime": 13.0462, |
|
"eval_samples_per_second": 504.669, |
|
"eval_steps_per_second": 7.895, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 0.48870024090856945, |
|
"eval_loss": 0.5179461240768433, |
|
"eval_runtime": 12.9404, |
|
"eval_samples_per_second": 508.793, |
|
"eval_steps_per_second": 7.96, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 0.4898474245726741, |
|
"eval_loss": 0.5173225402832031, |
|
"eval_runtime": 12.9886, |
|
"eval_samples_per_second": 506.904, |
|
"eval_steps_per_second": 7.93, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 0.4909946082367787, |
|
"eval_loss": 0.5169667601585388, |
|
"eval_runtime": 12.918, |
|
"eval_samples_per_second": 509.678, |
|
"eval_steps_per_second": 7.973, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 0.4921417919008833, |
|
"eval_loss": 0.5165331363677979, |
|
"eval_runtime": 12.9162, |
|
"eval_samples_per_second": 509.747, |
|
"eval_steps_per_second": 7.974, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 0.49328897556498796, |
|
"grad_norm": 2.352576732635498, |
|
"learning_rate": 9.284469938389632e-06, |
|
"loss": 0.9276, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 0.49328897556498796, |
|
"eval_loss": 0.5160226821899414, |
|
"eval_runtime": 12.9928, |
|
"eval_samples_per_second": 506.741, |
|
"eval_steps_per_second": 7.927, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 0.49443615922909256, |
|
"eval_loss": 0.5154353976249695, |
|
"eval_runtime": 13.0317, |
|
"eval_samples_per_second": 505.228, |
|
"eval_steps_per_second": 7.904, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 0.4955833428931972, |
|
"eval_loss": 0.5149873495101929, |
|
"eval_runtime": 12.9748, |
|
"eval_samples_per_second": 507.445, |
|
"eval_steps_per_second": 7.938, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 0.4967305265573018, |
|
"eval_loss": 0.514439582824707, |
|
"eval_runtime": 12.9649, |
|
"eval_samples_per_second": 507.833, |
|
"eval_steps_per_second": 7.945, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 0.4978777102214064, |
|
"eval_loss": 0.5140998363494873, |
|
"eval_runtime": 13.0221, |
|
"eval_samples_per_second": 505.603, |
|
"eval_steps_per_second": 7.91, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 0.4990248938855111, |
|
"eval_loss": 0.5139394402503967, |
|
"eval_runtime": 13.0098, |
|
"eval_samples_per_second": 506.081, |
|
"eval_steps_per_second": 7.917, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 0.5001720775496157, |
|
"eval_loss": 0.5137957334518433, |
|
"eval_runtime": 13.0381, |
|
"eval_samples_per_second": 504.983, |
|
"eval_steps_per_second": 7.9, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 0.5013192612137203, |
|
"eval_loss": 0.5136282444000244, |
|
"eval_runtime": 13.0808, |
|
"eval_samples_per_second": 503.332, |
|
"eval_steps_per_second": 7.874, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 0.502466444877825, |
|
"eval_loss": 0.5132753849029541, |
|
"eval_runtime": 13.0402, |
|
"eval_samples_per_second": 504.902, |
|
"eval_steps_per_second": 7.899, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 0.5036136285419296, |
|
"eval_loss": 0.5129442811012268, |
|
"eval_runtime": 12.9179, |
|
"eval_samples_per_second": 509.682, |
|
"eval_steps_per_second": 7.973, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 0.5047608122060342, |
|
"grad_norm": 2.744755983352661, |
|
"learning_rate": 9.241980029742936e-06, |
|
"loss": 0.9331, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 0.5047608122060342, |
|
"eval_loss": 0.5125741362571716, |
|
"eval_runtime": 12.9316, |
|
"eval_samples_per_second": 509.142, |
|
"eval_steps_per_second": 7.965, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 0.5059079958701388, |
|
"eval_loss": 0.5123194456100464, |
|
"eval_runtime": 13.0104, |
|
"eval_samples_per_second": 506.056, |
|
"eval_steps_per_second": 7.917, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 0.5070551795342434, |
|
"eval_loss": 0.5116878747940063, |
|
"eval_runtime": 13.0458, |
|
"eval_samples_per_second": 504.684, |
|
"eval_steps_per_second": 7.895, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 0.5082023631983481, |
|
"eval_loss": 0.5112791061401367, |
|
"eval_runtime": 13.0399, |
|
"eval_samples_per_second": 504.911, |
|
"eval_steps_per_second": 7.899, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 0.5093495468624527, |
|
"eval_loss": 0.5108391642570496, |
|
"eval_runtime": 13.0444, |
|
"eval_samples_per_second": 504.738, |
|
"eval_steps_per_second": 7.896, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 0.5104967305265573, |
|
"eval_loss": 0.5106413960456848, |
|
"eval_runtime": 13.0495, |
|
"eval_samples_per_second": 504.542, |
|
"eval_steps_per_second": 7.893, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 0.5116439141906619, |
|
"eval_loss": 0.5105773210525513, |
|
"eval_runtime": 13.026, |
|
"eval_samples_per_second": 505.453, |
|
"eval_steps_per_second": 7.907, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 0.5127910978547665, |
|
"eval_loss": 0.5106124877929688, |
|
"eval_runtime": 13.0887, |
|
"eval_samples_per_second": 503.028, |
|
"eval_steps_per_second": 7.869, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 0.5139382815188712, |
|
"eval_loss": 0.5104395151138306, |
|
"eval_runtime": 13.0488, |
|
"eval_samples_per_second": 504.567, |
|
"eval_steps_per_second": 7.893, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 0.5150854651829758, |
|
"eval_loss": 0.510217547416687, |
|
"eval_runtime": 13.0309, |
|
"eval_samples_per_second": 505.261, |
|
"eval_steps_per_second": 7.904, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 0.5162326488470804, |
|
"grad_norm": 3.2935314178466797, |
|
"learning_rate": 9.199490121096241e-06, |
|
"loss": 0.907, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.5162326488470804, |
|
"eval_loss": 0.5096931457519531, |
|
"eval_runtime": 13.0419, |
|
"eval_samples_per_second": 504.834, |
|
"eval_steps_per_second": 7.898, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.517379832511185, |
|
"eval_loss": 0.5092429518699646, |
|
"eval_runtime": 13.0377, |
|
"eval_samples_per_second": 504.999, |
|
"eval_steps_per_second": 7.9, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 0.5185270161752896, |
|
"eval_loss": 0.5086391568183899, |
|
"eval_runtime": 12.9418, |
|
"eval_samples_per_second": 508.739, |
|
"eval_steps_per_second": 7.959, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 0.5196741998393943, |
|
"eval_loss": 0.508176326751709, |
|
"eval_runtime": 13.154, |
|
"eval_samples_per_second": 500.533, |
|
"eval_steps_per_second": 7.83, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 0.5208213835034989, |
|
"eval_loss": 0.5079353451728821, |
|
"eval_runtime": 13.0269, |
|
"eval_samples_per_second": 505.417, |
|
"eval_steps_per_second": 7.907, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 0.5219685671676035, |
|
"eval_loss": 0.5075249671936035, |
|
"eval_runtime": 13.0283, |
|
"eval_samples_per_second": 505.36, |
|
"eval_steps_per_second": 7.906, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 0.5231157508317081, |
|
"eval_loss": 0.5070711970329285, |
|
"eval_runtime": 13.0867, |
|
"eval_samples_per_second": 503.106, |
|
"eval_steps_per_second": 7.871, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 0.5242629344958127, |
|
"eval_loss": 0.5066924095153809, |
|
"eval_runtime": 13.0158, |
|
"eval_samples_per_second": 505.847, |
|
"eval_steps_per_second": 7.913, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 0.5254101181599174, |
|
"eval_loss": 0.5066002607345581, |
|
"eval_runtime": 13.0247, |
|
"eval_samples_per_second": 505.499, |
|
"eval_steps_per_second": 7.908, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 0.526557301824022, |
|
"eval_loss": 0.5061988830566406, |
|
"eval_runtime": 12.9452, |
|
"eval_samples_per_second": 508.604, |
|
"eval_steps_per_second": 7.957, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 0.5277044854881267, |
|
"grad_norm": 2.8888604640960693, |
|
"learning_rate": 9.157000212449543e-06, |
|
"loss": 0.913, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 0.5277044854881267, |
|
"eval_loss": 0.5059130787849426, |
|
"eval_runtime": 13.0522, |
|
"eval_samples_per_second": 504.435, |
|
"eval_steps_per_second": 7.891, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 0.5288516691522313, |
|
"eval_loss": 0.5056325793266296, |
|
"eval_runtime": 13.0474, |
|
"eval_samples_per_second": 504.62, |
|
"eval_steps_per_second": 7.894, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 0.5299988528163359, |
|
"eval_loss": 0.5051873922348022, |
|
"eval_runtime": 13.0297, |
|
"eval_samples_per_second": 505.306, |
|
"eval_steps_per_second": 7.905, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 0.5311460364804406, |
|
"eval_loss": 0.5046107769012451, |
|
"eval_runtime": 13.0544, |
|
"eval_samples_per_second": 504.351, |
|
"eval_steps_per_second": 7.89, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 0.5322932201445452, |
|
"eval_loss": 0.5039039254188538, |
|
"eval_runtime": 13.0434, |
|
"eval_samples_per_second": 504.778, |
|
"eval_steps_per_second": 7.897, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 0.5334404038086498, |
|
"eval_loss": 0.5032686591148376, |
|
"eval_runtime": 13.0225, |
|
"eval_samples_per_second": 505.586, |
|
"eval_steps_per_second": 7.909, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 0.5345875874727544, |
|
"eval_loss": 0.5030205249786377, |
|
"eval_runtime": 13.044, |
|
"eval_samples_per_second": 504.753, |
|
"eval_steps_per_second": 7.896, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 0.535734771136859, |
|
"eval_loss": 0.5027660727500916, |
|
"eval_runtime": 13.0891, |
|
"eval_samples_per_second": 503.013, |
|
"eval_steps_per_second": 7.869, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 0.5368819548009637, |
|
"eval_loss": 0.5026541352272034, |
|
"eval_runtime": 13.0208, |
|
"eval_samples_per_second": 505.653, |
|
"eval_steps_per_second": 7.91, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 0.5380291384650683, |
|
"eval_loss": 0.5023172497749329, |
|
"eval_runtime": 13.0369, |
|
"eval_samples_per_second": 505.028, |
|
"eval_steps_per_second": 7.901, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 0.5391763221291729, |
|
"grad_norm": 2.457648992538452, |
|
"learning_rate": 9.114510303802847e-06, |
|
"loss": 0.9047, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 0.5391763221291729, |
|
"eval_loss": 0.5020408034324646, |
|
"eval_runtime": 13.0428, |
|
"eval_samples_per_second": 504.8, |
|
"eval_steps_per_second": 7.897, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 0.5403235057932775, |
|
"eval_loss": 0.5017600059509277, |
|
"eval_runtime": 13.0177, |
|
"eval_samples_per_second": 505.772, |
|
"eval_steps_per_second": 7.912, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 0.5414706894573821, |
|
"eval_loss": 0.5015026926994324, |
|
"eval_runtime": 13.0186, |
|
"eval_samples_per_second": 505.738, |
|
"eval_steps_per_second": 7.912, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 0.5426178731214868, |
|
"eval_loss": 0.5008870959281921, |
|
"eval_runtime": 12.9903, |
|
"eval_samples_per_second": 506.841, |
|
"eval_steps_per_second": 7.929, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 0.5437650567855914, |
|
"eval_loss": 0.500299334526062, |
|
"eval_runtime": 13.032, |
|
"eval_samples_per_second": 505.218, |
|
"eval_steps_per_second": 7.904, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 0.544912240449696, |
|
"eval_loss": 0.49969592690467834, |
|
"eval_runtime": 13.0199, |
|
"eval_samples_per_second": 505.686, |
|
"eval_steps_per_second": 7.911, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 0.5460594241138006, |
|
"eval_loss": 0.4990648329257965, |
|
"eval_runtime": 13.0229, |
|
"eval_samples_per_second": 505.573, |
|
"eval_steps_per_second": 7.909, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 0.5472066077779052, |
|
"eval_loss": 0.4983595013618469, |
|
"eval_runtime": 13.0558, |
|
"eval_samples_per_second": 504.298, |
|
"eval_steps_per_second": 7.889, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 0.5483537914420099, |
|
"eval_loss": 0.4980465769767761, |
|
"eval_runtime": 13.0075, |
|
"eval_samples_per_second": 506.171, |
|
"eval_steps_per_second": 7.919, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 0.5495009751061145, |
|
"eval_loss": 0.4980180859565735, |
|
"eval_runtime": 13.0189, |
|
"eval_samples_per_second": 505.727, |
|
"eval_steps_per_second": 7.912, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 0.5506481587702191, |
|
"grad_norm": 2.9509494304656982, |
|
"learning_rate": 9.07202039515615e-06, |
|
"loss": 0.887, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 0.5506481587702191, |
|
"eval_loss": 0.4978945553302765, |
|
"eval_runtime": 13.073, |
|
"eval_samples_per_second": 503.632, |
|
"eval_steps_per_second": 7.879, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 0.5517953424343237, |
|
"eval_loss": 0.49748191237449646, |
|
"eval_runtime": 13.0456, |
|
"eval_samples_per_second": 504.69, |
|
"eval_steps_per_second": 7.895, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 0.5529425260984283, |
|
"eval_loss": 0.49734070897102356, |
|
"eval_runtime": 12.917, |
|
"eval_samples_per_second": 509.716, |
|
"eval_steps_per_second": 7.974, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 0.554089709762533, |
|
"eval_loss": 0.4968598783016205, |
|
"eval_runtime": 12.9242, |
|
"eval_samples_per_second": 509.434, |
|
"eval_steps_per_second": 7.97, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 0.5552368934266376, |
|
"eval_loss": 0.49663135409355164, |
|
"eval_runtime": 12.917, |
|
"eval_samples_per_second": 509.714, |
|
"eval_steps_per_second": 7.974, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 0.5563840770907422, |
|
"eval_loss": 0.4964074492454529, |
|
"eval_runtime": 12.9307, |
|
"eval_samples_per_second": 509.176, |
|
"eval_steps_per_second": 7.966, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 0.5575312607548468, |
|
"eval_loss": 0.49635863304138184, |
|
"eval_runtime": 13.0256, |
|
"eval_samples_per_second": 505.465, |
|
"eval_steps_per_second": 7.907, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 0.5586784444189514, |
|
"eval_loss": 0.4959893226623535, |
|
"eval_runtime": 13.019, |
|
"eval_samples_per_second": 505.724, |
|
"eval_steps_per_second": 7.912, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 0.5598256280830561, |
|
"eval_loss": 0.495743066072464, |
|
"eval_runtime": 12.9529, |
|
"eval_samples_per_second": 508.302, |
|
"eval_steps_per_second": 7.952, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 0.5609728117471607, |
|
"eval_loss": 0.49548840522766113, |
|
"eval_runtime": 12.9165, |
|
"eval_samples_per_second": 509.734, |
|
"eval_steps_per_second": 7.974, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 0.5621199954112653, |
|
"grad_norm": 2.523698329925537, |
|
"learning_rate": 9.029530486509454e-06, |
|
"loss": 0.8645, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 0.5621199954112653, |
|
"eval_loss": 0.4952070116996765, |
|
"eval_runtime": 12.9252, |
|
"eval_samples_per_second": 509.394, |
|
"eval_steps_per_second": 7.969, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 0.5632671790753699, |
|
"eval_loss": 0.49501940608024597, |
|
"eval_runtime": 12.9174, |
|
"eval_samples_per_second": 509.699, |
|
"eval_steps_per_second": 7.974, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 0.5644143627394745, |
|
"eval_loss": 0.49515894055366516, |
|
"eval_runtime": 12.9248, |
|
"eval_samples_per_second": 509.409, |
|
"eval_steps_per_second": 7.969, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 0.5655615464035793, |
|
"eval_loss": 0.4948519468307495, |
|
"eval_runtime": 13.0296, |
|
"eval_samples_per_second": 505.311, |
|
"eval_steps_per_second": 7.905, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 0.5667087300676839, |
|
"eval_loss": 0.4942854344844818, |
|
"eval_runtime": 13.0211, |
|
"eval_samples_per_second": 505.639, |
|
"eval_steps_per_second": 7.91, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 0.5678559137317885, |
|
"eval_loss": 0.49377119541168213, |
|
"eval_runtime": 13.0383, |
|
"eval_samples_per_second": 504.973, |
|
"eval_steps_per_second": 7.9, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 0.569003097395893, |
|
"eval_loss": 0.4936191439628601, |
|
"eval_runtime": 13.02, |
|
"eval_samples_per_second": 505.683, |
|
"eval_steps_per_second": 7.911, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 0.5701502810599977, |
|
"eval_loss": 0.49330177903175354, |
|
"eval_runtime": 13.023, |
|
"eval_samples_per_second": 505.566, |
|
"eval_steps_per_second": 7.909, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 0.5712974647241024, |
|
"eval_loss": 0.4930713176727295, |
|
"eval_runtime": 13.0236, |
|
"eval_samples_per_second": 505.545, |
|
"eval_steps_per_second": 7.909, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 0.572444648388207, |
|
"eval_loss": 0.49287787079811096, |
|
"eval_runtime": 12.9359, |
|
"eval_samples_per_second": 508.972, |
|
"eval_steps_per_second": 7.962, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 0.5735918320523116, |
|
"grad_norm": 8.273838996887207, |
|
"learning_rate": 8.987040577862758e-06, |
|
"loss": 0.8348, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.5735918320523116, |
|
"eval_loss": 0.49241214990615845, |
|
"eval_runtime": 12.9315, |
|
"eval_samples_per_second": 509.146, |
|
"eval_steps_per_second": 7.965, |
|
"step": 5000 |
|
} |
|
], |
|
"logging_steps": 100, |
|
"max_steps": 26151, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 10, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 0.0, |
|
"train_batch_size": 64, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|