|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "eval_steps": 500,
  "global_step": 1620,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.02, "learning_rate": 3.999905983014452e-05, "loss": 1.242, "step": 5},
    {"epoch": 0.03, "learning_rate": 3.999623940897003e-05, "loss": 1.0243, "step": 10},
    {"epoch": 0.05, "learning_rate": 3.9991539001644015e-05, "loss": 1.1997, "step": 15},
    {"epoch": 0.06, "learning_rate": 3.998495905008461e-05, "loss": 1.0152, "step": 20},
    {"epoch": 0.08, "learning_rate": 3.997650017291901e-05, "loss": 0.848, "step": 25},
    {"epoch": 0.09, "learning_rate": 3.996616316542537e-05, "loss": 0.9819, "step": 30},
    {"epoch": 0.11, "learning_rate": 3.995394899945796e-05, "loss": 0.8749, "step": 35},
    {"epoch": 0.12, "learning_rate": 3.993985882335584e-05, "loss": 0.7318, "step": 40},
    {"epoch": 0.14, "learning_rate": 3.9923893961834914e-05, "loss": 0.9248, "step": 45},
    {"epoch": 0.15, "learning_rate": 3.9906055915863316e-05, "loss": 0.6812, "step": 50},
    {"epoch": 0.17, "learning_rate": 3.9886346362520375e-05, "loss": 0.9068, "step": 55},
    {"epoch": 0.19, "learning_rate": 3.9864767154838864e-05, "loss": 0.747, "step": 60},
    {"epoch": 0.2, "learning_rate": 3.984132032163085e-05, "loss": 0.6329, "step": 65},
    {"epoch": 0.22, "learning_rate": 3.9816008067296905e-05, "loss": 0.653, "step": 70},
    {"epoch": 0.23, "learning_rate": 3.978883277161889e-05, "loss": 0.851, "step": 75},
    {"epoch": 0.25, "learning_rate": 3.9759796989536185e-05, "loss": 0.7384, "step": 80},
    {"epoch": 0.26, "learning_rate": 3.972890345090548e-05, "loss": 0.7389, "step": 85},
    {"epoch": 0.28, "learning_rate": 3.9696155060244166e-05, "loss": 0.7009, "step": 90},
    {"epoch": 0.29, "learning_rate": 3.9661554896457205e-05, "loss": 0.7215, "step": 95},
    {"epoch": 0.31, "learning_rate": 3.9625106212547696e-05, "loss": 0.8377, "step": 100},
    {"epoch": 0.32, "learning_rate": 3.958681243531103e-05, "loss": 0.7017, "step": 105},
    {"epoch": 0.34, "learning_rate": 3.9546677165012714e-05, "loss": 0.6667, "step": 110},
    {"epoch": 0.35, "learning_rate": 3.950470417504987e-05, "loss": 0.7254, "step": 115},
    {"epoch": 0.37, "learning_rate": 3.946089741159648e-05, "loss": 0.7825, "step": 120},
    {"epoch": 0.39, "learning_rate": 3.9415260993232405e-05, "loss": 0.599, "step": 125},
    {"epoch": 0.4, "learning_rate": 3.9367799210556124e-05, "loss": 0.5953, "step": 130},
    {"epoch": 0.42, "learning_rate": 3.931851652578137e-05, "loss": 0.634, "step": 135},
    {"epoch": 0.43, "learning_rate": 3.926741757231761e-05, "loss": 0.6156, "step": 140},
    {"epoch": 0.45, "learning_rate": 3.921450715433441e-05, "loss": 0.6751, "step": 145},
    {"epoch": 0.46, "learning_rate": 3.915979024630978e-05, "loss": 0.659, "step": 150},
    {"epoch": 0.48, "learning_rate": 3.910327199256246e-05, "loss": 0.5869, "step": 155},
    {"epoch": 0.49, "learning_rate": 3.904495770676831e-05, "loss": 0.6645, "step": 160},
    {"epoch": 0.51, "learning_rate": 3.898485287146068e-05, "loss": 0.6884, "step": 165},
    {"epoch": 0.52, "learning_rate": 3.892296313751502e-05, "loss": 0.5636, "step": 170},
    {"epoch": 0.54, "learning_rate": 3.885929432361752e-05, "loss": 0.5711, "step": 175},
    {"epoch": 0.56, "learning_rate": 3.879385241571817e-05, "loss": 0.6194, "step": 180},
    {"epoch": 0.57, "learning_rate": 3.8726643566467866e-05, "loss": 0.5763, "step": 185},
    {"epoch": 0.59, "learning_rate": 3.865767409464002e-05, "loss": 0.5505, "step": 190},
    {"epoch": 0.6, "learning_rate": 3.858695048453645e-05, "loss": 0.5694, "step": 195},
    {"epoch": 0.62, "learning_rate": 3.8514479385377813e-05, "loss": 0.6624, "step": 200},
    {"epoch": 0.63, "learning_rate": 3.844026761067837e-05, "loss": 0.6462, "step": 205},
    {"epoch": 0.65, "learning_rate": 3.8364322137605484e-05, "loss": 0.8272, "step": 210},
    {"epoch": 0.66, "learning_rate": 3.828665010632359e-05, "loss": 0.5884, "step": 215},
    {"epoch": 0.68, "learning_rate": 3.8207258819322936e-05, "loss": 0.8304, "step": 220},
    {"epoch": 0.69, "learning_rate": 3.812615574073301e-05, "loss": 0.6169, "step": 225},
    {"epoch": 0.71, "learning_rate": 3.804334849562076e-05, "loss": 0.4681, "step": 230},
    {"epoch": 0.73, "learning_rate": 3.795884486927376e-05, "loss": 0.7102, "step": 235},
    {"epoch": 0.74, "learning_rate": 3.787265280646825e-05, "loss": 0.6784, "step": 240},
    {"epoch": 0.76, "learning_rate": 3.7784780410722125e-05, "loss": 0.6555, "step": 245},
    {"epoch": 0.77, "learning_rate": 3.7695235943533155e-05, "loss": 0.7551, "step": 250},
    {"epoch": 0.79, "learning_rate": 3.760402782360222e-05, "loss": 0.6307, "step": 255},
    {"epoch": 0.8, "learning_rate": 3.7511164626041823e-05, "loss": 0.5889, "step": 260},
    {"epoch": 0.82, "learning_rate": 3.741665508156984e-05, "loss": 0.6239, "step": 265},
    {"epoch": 0.83, "learning_rate": 3.732050807568878e-05, "loss": 0.5586, "step": 270},
    {"epoch": 0.85, "learning_rate": 3.7222732647850276e-05, "loss": 0.6846, "step": 275},
    {"epoch": 0.86, "learning_rate": 3.7123337990605335e-05, "loss": 0.6084, "step": 280},
    {"epoch": 0.88, "learning_rate": 3.702233344873999e-05, "loss": 0.6484, "step": 285},
    {"epoch": 0.9, "learning_rate": 3.691972851839682e-05, "loss": 0.6492, "step": 290},
    {"epoch": 0.91, "learning_rate": 3.6815532846182066e-05, "loss": 0.5321, "step": 295},
    {"epoch": 0.93, "learning_rate": 3.6709756228258735e-05, "loss": 0.6356, "step": 300},
    {"epoch": 0.94, "learning_rate": 3.660240860942558e-05, "loss": 0.6797, "step": 305},
    {"epoch": 0.96, "learning_rate": 3.649350008218214e-05, "loss": 0.7649, "step": 310},
    {"epoch": 0.97, "learning_rate": 3.638304088577984e-05, "loss": 0.5038, "step": 315},
    {"epoch": 0.99, "learning_rate": 3.6271041405259354e-05, "loss": 0.649, "step": 320},
    {"epoch": 1.0, "eval_loss": 0.6880638003349304, "eval_runtime": 15.1875, "eval_samples_per_second": 2.239, "eval_steps_per_second": 2.239, "step": 324},
    {"epoch": 1.0, "learning_rate": 3.6157512170474224e-05, "loss": 0.6886, "step": 325},
    {"epoch": 1.02, "learning_rate": 3.604246385510088e-05, "loss": 0.6532, "step": 330},
    {"epoch": 1.03, "learning_rate": 3.592590727563511e-05, "loss": 0.5473, "step": 335},
    {"epoch": 1.05, "learning_rate": 3.580785339037519e-05, "loss": 0.6395, "step": 340},
    {"epoch": 1.06, "learning_rate": 3.568831329839152e-05, "loss": 0.6133, "step": 345},
    {"epoch": 1.08, "learning_rate": 3.5567298238483206e-05, "loss": 0.5942, "step": 350},
    {"epoch": 1.1, "learning_rate": 3.5444819588121384e-05, "loss": 0.5419, "step": 355},
    {"epoch": 1.11, "learning_rate": 3.532088886237956e-05, "loss": 0.4829, "step": 360},
    {"epoch": 1.13, "learning_rate": 3.5195517712850994e-05, "loss": 0.6501, "step": 365},
    {"epoch": 1.14, "learning_rate": 3.506871792655321e-05, "loss": 0.5375, "step": 370},
    {"epoch": 1.16, "learning_rate": 3.4940501424819927e-05, "loss": 0.4726, "step": 375},
    {"epoch": 1.17, "learning_rate": 3.48108802621801e-05, "loss": 0.6864, "step": 380},
    {"epoch": 1.19, "learning_rate": 3.4679866625224705e-05, "loss": 0.5066, "step": 385},
    {"epoch": 1.2, "learning_rate": 3.4547472831460976e-05, "loss": 0.6395, "step": 390},
    {"epoch": 1.22, "learning_rate": 3.4413711328154295e-05, "loss": 0.5156, "step": 395},
    {"epoch": 1.23, "learning_rate": 3.4278594691157985e-05, "loss": 0.5499, "step": 400},
    {"epoch": 1.25, "learning_rate": 3.4142135623730954e-05, "loss": 0.4902, "step": 405},
    {"epoch": 1.27, "learning_rate": 3.400434695534337e-05, "loss": 0.5851, "step": 410},
    {"epoch": 1.28, "learning_rate": 3.3865241640470486e-05, "loss": 0.6611, "step": 415},
    {"epoch": 1.3, "learning_rate": 3.372483275737468e-05, "loss": 0.555, "step": 420},
    {"epoch": 1.31, "learning_rate": 3.358313350687587e-05, "loss": 0.525, "step": 425},
    {"epoch": 1.33, "learning_rate": 3.3440157211110454e-05, "loss": 0.5132, "step": 430},
    {"epoch": 1.34, "learning_rate": 3.3295917312278754e-05, "loss": 0.4506, "step": 435},
    {"epoch": 1.36, "learning_rate": 3.315042737138128e-05, "loss": 0.7732, "step": 440},
    {"epoch": 1.37, "learning_rate": 3.300370106694368e-05, "loss": 0.5592, "step": 445},
    {"epoch": 1.39, "learning_rate": 3.285575219373079e-05, "loss": 0.3999, "step": 450},
    {"epoch": 1.4, "learning_rate": 3.27065946614497e-05, "loss": 0.5542, "step": 455},
    {"epoch": 1.42, "learning_rate": 3.255624249344198e-05, "loss": 0.5484, "step": 460},
    {"epoch": 1.44, "learning_rate": 3.2404709825365204e-05, "loss": 0.5777, "step": 465},
    {"epoch": 1.45, "learning_rate": 3.2252010903864057e-05, "loss": 0.6279, "step": 470},
    {"epoch": 1.47, "learning_rate": 3.2098160085230835e-05, "loss": 0.4809, "step": 475},
    {"epoch": 1.48, "learning_rate": 3.194317183405573e-05, "loss": 0.5332, "step": 480},
    {"epoch": 1.5, "learning_rate": 3.17870607218669e-05, "loss": 0.6713, "step": 485},
    {"epoch": 1.51, "learning_rate": 3.1629841425760534e-05, "loss": 0.5631, "step": 490},
    {"epoch": 1.53, "learning_rate": 3.147152872702092e-05, "loss": 0.5472, "step": 495},
    {"epoch": 1.54, "learning_rate": 3.1312137509730776e-05, "loss": 0.4905, "step": 500},
    {"epoch": 1.56, "learning_rate": 3.115168275937186e-05, "loss": 0.5958, "step": 505},
    {"epoch": 1.57, "learning_rate": 3.0990179561416124e-05, "loss": 0.4714, "step": 510},
    {"epoch": 1.59, "learning_rate": 3.0827643099907394e-05, "loss": 0.5457, "step": 515},
    {"epoch": 1.6, "learning_rate": 3.066408865603383e-05, "loss": 0.6976, "step": 520},
    {"epoch": 1.62, "learning_rate": 3.0499531606691204e-05, "loss": 0.5061, "step": 525},
    {"epoch": 1.64, "learning_rate": 3.0333987423037262e-05, "loss": 0.4926, "step": 530},
    {"epoch": 1.65, "learning_rate": 3.016747166903711e-05, "loss": 0.4324, "step": 535},
    {"epoch": 1.67, "learning_rate": 3.0000000000000004e-05, "loss": 0.5261, "step": 540},
    {"epoch": 1.68, "learning_rate": 2.9831588161107413e-05, "loss": 0.4908, "step": 545},
    {"epoch": 1.7, "learning_rate": 2.9662251985932773e-05, "loss": 0.4972, "step": 550},
    {"epoch": 1.71, "learning_rate": 2.9492007394952812e-05, "loss": 0.5168, "step": 555},
    {"epoch": 1.73, "learning_rate": 2.9320870394050783e-05, "loss": 0.6196, "step": 560},
    {"epoch": 1.74, "learning_rate": 2.9148857073011618e-05, "loss": 0.5633, "step": 565},
    {"epoch": 1.76, "learning_rate": 2.897598360400925e-05, "loss": 0.6044, "step": 570},
    {"epoch": 1.77, "learning_rate": 2.88022662400861e-05, "loss": 0.5007, "step": 575},
    {"epoch": 1.79, "learning_rate": 2.8627721313625073e-05, "loss": 0.47, "step": 580},
    {"epoch": 1.81, "learning_rate": 2.8452365234813992e-05, "loss": 0.5468, "step": 585},
    {"epoch": 1.82, "learning_rate": 2.8276214490102788e-05, "loss": 0.5174, "step": 590},
    {"epoch": 1.84, "learning_rate": 2.8099285640653474e-05, "loss": 0.5872, "step": 595},
    {"epoch": 1.85, "learning_rate": 2.792159532078314e-05, "loss": 0.5265, "step": 600},
    {"epoch": 1.87, "learning_rate": 2.7743160236400014e-05, "loss": 0.4837, "step": 605},
    {"epoch": 1.88, "learning_rate": 2.7563997163432853e-05, "loss": 0.555, "step": 610},
    {"epoch": 1.9, "learning_rate": 2.738412294625369e-05, "loss": 0.4858, "step": 615},
    {"epoch": 1.91, "learning_rate": 2.720355449609421e-05, "loss": 0.607, "step": 620},
    {"epoch": 1.93, "learning_rate": 2.702230878945578e-05, "loss": 0.4843, "step": 625},
    {"epoch": 1.94, "learning_rate": 2.684040286651338e-05, "loss": 0.3911, "step": 630},
    {"epoch": 1.96, "learning_rate": 2.665785382951353e-05, "loss": 0.4817, "step": 635},
    {"epoch": 1.98, "learning_rate": 2.6474678841166426e-05, "loss": 0.5283, "step": 640},
    {"epoch": 1.99, "learning_rate": 2.6290895123032277e-05, "loss": 0.4481, "step": 645},
    {"epoch": 2.0, "eval_loss": 0.617668628692627, "eval_runtime": 15.1901, "eval_samples_per_second": 2.238, "eval_steps_per_second": 2.238, "step": 648},
    {"epoch": 2.01, "learning_rate": 2.6106519953902268e-05, "loss": 0.4244, "step": 650},
    {"epoch": 2.02, "learning_rate": 2.5921570668174e-05, "loss": 0.4139, "step": 655},
    {"epoch": 2.04, "learning_rate": 2.5736064654221808e-05, "loss": 0.3932, "step": 660},
    {"epoch": 2.05, "learning_rate": 2.5550019352761917e-05, "loss": 0.352, "step": 665},
    {"epoch": 2.07, "learning_rate": 2.536345225521275e-05, "loss": 0.4855, "step": 670},
    {"epoch": 2.08, "learning_rate": 2.5176380902050418e-05, "loss": 0.3717, "step": 675},
    {"epoch": 2.1, "learning_rate": 2.4988822881159627e-05, "loss": 0.4027, "step": 680},
    {"epoch": 2.11, "learning_rate": 2.4800795826180122e-05, "loss": 0.5418, "step": 685},
    {"epoch": 2.13, "learning_rate": 2.4612317414848804e-05, "loss": 0.4436, "step": 690},
    {"epoch": 2.15, "learning_rate": 2.4423405367337756e-05, "loss": 0.5756, "step": 695},
    {"epoch": 2.16, "learning_rate": 2.423407744458822e-05, "loss": 0.4582, "step": 700},
    {"epoch": 2.18, "learning_rate": 2.4044351446640763e-05, "loss": 0.4207, "step": 705},
    {"epoch": 2.19, "learning_rate": 2.3854245210961798e-05, "loss": 0.4545, "step": 710},
    {"epoch": 2.21, "learning_rate": 2.3663776610766537e-05, "loss": 0.4234, "step": 715},
    {"epoch": 2.22, "learning_rate": 2.3472963553338614e-05, "loss": 0.5984, "step": 720},
    {"epoch": 2.24, "learning_rate": 2.3281823978346484e-05, "loss": 0.4888, "step": 725},
    {"epoch": 2.25, "learning_rate": 2.3090375856156813e-05, "loss": 0.4076, "step": 730},
    {"epoch": 2.27, "learning_rate": 2.2898637186144935e-05, "loss": 0.5183, "step": 735},
    {"epoch": 2.28, "learning_rate": 2.2706625995002626e-05, "loss": 0.491, "step": 740},
    {"epoch": 2.3, "learning_rate": 2.251436033504326e-05, "loss": 0.4578, "step": 745},
    {"epoch": 2.31, "learning_rate": 2.2321858282504606e-05, "loss": 0.5541, "step": 750},
    {"epoch": 2.33, "learning_rate": 2.2129137935849365e-05, "loss": 0.6285, "step": 755},
    {"epoch": 2.35, "learning_rate": 2.1936217414063584e-05, "loss": 0.4705, "step": 760},
    {"epoch": 2.36, "learning_rate": 2.174311485495317e-05, "loss": 0.4543, "step": 765},
    {"epoch": 2.38, "learning_rate": 2.154984841343862e-05, "loss": 0.4023, "step": 770},
    {"epoch": 2.39, "learning_rate": 2.1356436259848186e-05, "loss": 0.4061, "step": 775},
    {"epoch": 2.41, "learning_rate": 2.1162896578209517e-05, "loss": 0.4692, "step": 780},
    {"epoch": 2.42, "learning_rate": 2.0969247564540064e-05, "loss": 0.4699, "step": 785},
    {"epoch": 2.44, "learning_rate": 2.077550742513634e-05, "loss": 0.4712, "step": 790},
    {"epoch": 2.45, "learning_rate": 2.058169437486223e-05, "loss": 0.4355, "step": 795},
    {"epoch": 2.47, "learning_rate": 2.038782663543649e-05, "loss": 0.4162, "step": 800},
    {"epoch": 2.48, "learning_rate": 2.0193922433719572e-05, "loss": 0.4225, "step": 805},
    {"epoch": 2.5, "learning_rate": 2e-05, "loss": 0.3708, "step": 810},
    {"epoch": 2.52, "learning_rate": 1.980607756628043e-05, "loss": 0.4406, "step": 815},
    {"epoch": 2.53, "learning_rate": 1.9612173364563517e-05, "loss": 0.4062, "step": 820},
    {"epoch": 2.55, "learning_rate": 1.9418305625137773e-05, "loss": 0.4487, "step": 825},
    {"epoch": 2.56, "learning_rate": 1.9224492574863663e-05, "loss": 0.482, "step": 830},
    {"epoch": 2.58, "learning_rate": 1.9030752435459946e-05, "loss": 0.4464, "step": 835},
    {"epoch": 2.59, "learning_rate": 1.8837103421790486e-05, "loss": 0.4163, "step": 840},
    {"epoch": 2.61, "learning_rate": 1.8643563740151817e-05, "loss": 0.3538, "step": 845},
    {"epoch": 2.62, "learning_rate": 1.8450151586561386e-05, "loss": 0.4511, "step": 850},
    {"epoch": 2.64, "learning_rate": 1.8256885145046837e-05, "loss": 0.5241, "step": 855},
    {"epoch": 2.65, "learning_rate": 1.806378258593642e-05, "loss": 0.4675, "step": 860},
    {"epoch": 2.67, "learning_rate": 1.7870862064150635e-05, "loss": 0.4342, "step": 865},
    {"epoch": 2.69, "learning_rate": 1.7678141717495394e-05, "loss": 0.4943, "step": 870},
    {"epoch": 2.7, "learning_rate": 1.748563966495675e-05, "loss": 0.4316, "step": 875},
    {"epoch": 2.72, "learning_rate": 1.7293374004997384e-05, "loss": 0.4722, "step": 880},
    {"epoch": 2.73, "learning_rate": 1.7101362813855068e-05, "loss": 0.4893, "step": 885},
    {"epoch": 2.75, "learning_rate": 1.6909624143843193e-05, "loss": 0.4276, "step": 890},
    {"epoch": 2.76, "learning_rate": 1.671817602165352e-05, "loss": 0.618, "step": 895},
    {"epoch": 2.78, "learning_rate": 1.6527036446661396e-05, "loss": 0.3951, "step": 900},
    {"epoch": 2.79, "learning_rate": 1.6336223389233466e-05, "loss": 0.4054, "step": 905},
    {"epoch": 2.81, "learning_rate": 1.6145754789038205e-05, "loss": 0.366, "step": 910},
    {"epoch": 2.82, "learning_rate": 1.5955648553359247e-05, "loss": 0.4008, "step": 915},
    {"epoch": 2.84, "learning_rate": 1.5765922555411793e-05, "loss": 0.3746, "step": 920},
    {"epoch": 2.85, "learning_rate": 1.5576594632662247e-05, "loss": 0.4082, "step": 925},
    {"epoch": 2.87, "learning_rate": 1.53876825851512e-05, "loss": 0.373, "step": 930},
    {"epoch": 2.89, "learning_rate": 1.5199204173819886e-05, "loss": 0.4913, "step": 935},
    {"epoch": 2.9, "learning_rate": 1.5011177118840376e-05, "loss": 0.3975, "step": 940},
    {"epoch": 2.92, "learning_rate": 1.4823619097949584e-05, "loss": 0.3414, "step": 945},
    {"epoch": 2.93, "learning_rate": 1.4636547744787251e-05, "loss": 0.5242, "step": 950},
    {"epoch": 2.95, "learning_rate": 1.444998064723809e-05, "loss": 0.4427, "step": 955},
    {"epoch": 2.96, "learning_rate": 1.4263935345778202e-05, "loss": 0.4831, "step": 960},
    {"epoch": 2.98, "learning_rate": 1.4078429331826007e-05, "loss": 0.4786, "step": 965},
    {"epoch": 2.99, "learning_rate": 1.389348004609774e-05, "loss": 0.4109, "step": 970},
    {"epoch": 3.0, "eval_loss": 0.5816357731819153, "eval_runtime": 15.1822, "eval_samples_per_second": 2.239, "eval_steps_per_second": 2.239, "step": 972},
    {"epoch": 3.01, "learning_rate": 1.3709104876967732e-05, "loss": 0.3918, "step": 975},
    {"epoch": 3.02, "learning_rate": 1.3525321158833582e-05, "loss": 0.3377, "step": 980},
    {"epoch": 3.04, "learning_rate": 1.3342146170486468e-05, "loss": 0.4046, "step": 985},
    {"epoch": 3.06, "learning_rate": 1.3159597133486628e-05, "loss": 0.3737, "step": 990},
    {"epoch": 3.07, "learning_rate": 1.2977691210544228e-05, "loss": 0.4188, "step": 995},
    {"epoch": 3.09, "learning_rate": 1.2796445503905797e-05, "loss": 0.3644, "step": 1000},
    {"epoch": 3.1, "learning_rate": 1.2615877053746315e-05, "loss": 0.5003, "step": 1005},
    {"epoch": 3.12, "learning_rate": 1.2436002836567154e-05, "loss": 0.3132, "step": 1010},
    {"epoch": 3.13, "learning_rate": 1.2256839763599993e-05, "loss": 0.3906, "step": 1015},
    {"epoch": 3.15, "learning_rate": 1.2078404679216864e-05, "loss": 0.3351, "step": 1020},
    {"epoch": 3.16, "learning_rate": 1.1900714359346527e-05, "loss": 0.4456, "step": 1025},
    {"epoch": 3.18, "learning_rate": 1.1723785509897219e-05, "loss": 0.4693, "step": 1030},
    {"epoch": 3.19, "learning_rate": 1.1547634765186016e-05, "loss": 0.342, "step": 1035},
    {"epoch": 3.21, "learning_rate": 1.1372278686374935e-05, "loss": 0.411, "step": 1040},
    {"epoch": 3.23, "learning_rate": 1.119773375991391e-05, "loss": 0.4285, "step": 1045},
    {"epoch": 3.24, "learning_rate": 1.1024016395990758e-05, "loss": 0.4612, "step": 1050},
    {"epoch": 3.26, "learning_rate": 1.0851142926988387e-05, "loss": 0.473, "step": 1055},
    {"epoch": 3.27, "learning_rate": 1.067912960594923e-05, "loss": 0.3247, "step": 1060},
    {"epoch": 3.29, "learning_rate": 1.0507992605047193e-05, "loss": 0.3601, "step": 1065},
    {"epoch": 3.3, "learning_rate": 1.033774801406723e-05, "loss": 0.3196, "step": 1070},
    {"epoch": 3.32, "learning_rate": 1.016841183889259e-05, "loss": 0.415, "step": 1075},
    {"epoch": 3.33, "learning_rate": 1.0000000000000006e-05, "loss": 0.4202, "step": 1080},
    {"epoch": 3.35, "learning_rate": 9.832528330962896e-06, "loss": 0.3907, "step": 1085},
    {"epoch": 3.36, "learning_rate": 9.666012576962743e-06, "loss": 0.3335, "step": 1090},
    {"epoch": 3.38, "learning_rate": 9.5004683933088e-06, "loss": 0.362, "step": 1095},
    {"epoch": 3.4, "learning_rate": 9.33591134396618e-06, "loss": 0.4044, "step": 1100},
    {"epoch": 3.41, "learning_rate": 9.172356900092607e-06, "loss": 0.4207, "step": 1105},
    {"epoch": 3.43, "learning_rate": 9.009820438583881e-06, "loss": 0.3547, "step": 1110},
    {"epoch": 3.44, "learning_rate": 8.848317240628146e-06, "loss": 0.3789, "step": 1115},
    {"epoch": 3.46, "learning_rate": 8.687862490269232e-06, "loss": 0.3858, "step": 1120},
    {"epoch": 3.47, "learning_rate": 8.528471272979083e-06, "loss": 0.4855, "step": 1125},
    {"epoch": 3.49, "learning_rate": 8.370158574239466e-06, "loss": 0.3644, "step": 1130},
    {"epoch": 3.5, "learning_rate": 8.212939278133105e-06, "loss": 0.5069, "step": 1135},
    {"epoch": 3.52, "learning_rate": 8.056828165944282e-06, "loss": 0.371, "step": 1140},
    {"epoch": 3.53, "learning_rate": 7.901839914769165e-06, "loss": 0.4089, "step": 1145},
    {"epoch": 3.55, "learning_rate": 7.747989096135943e-06, "loss": 0.3389, "step": 1150},
    {"epoch": 3.56, "learning_rate": 7.595290174634802e-06, "loss": 0.5253, "step": 1155},
    {"epoch": 3.58, "learning_rate": 7.443757506558033e-06, "loss": 0.4673, "step": 1160},
    {"epoch": 3.6, "learning_rate": 7.293405338550304e-06, "loss": 0.3363, "step": 1165},
    {"epoch": 3.61, "learning_rate": 7.1442478062692135e-06, "loss": 0.3789, "step": 1170},
    {"epoch": 3.63, "learning_rate": 6.996298933056333e-06, "loss": 0.3884, "step": 1175},
    {"epoch": 3.64, "learning_rate": 6.84957262861873e-06, "loss": 0.3993, "step": 1180},
    {"epoch": 3.66, "learning_rate": 6.704082687721243e-06, "loss": 0.3588, "step": 1185},
    {"epoch": 3.67, "learning_rate": 6.559842788889552e-06, "loss": 0.5273, "step": 1190},
    {"epoch": 3.69, "learning_rate": 6.4168664931241384e-06, "loss": 0.513, "step": 1195},
    {"epoch": 3.7, "learning_rate": 6.275167242625331e-06, "loss": 0.4905, "step": 1200},
    {"epoch": 3.72, "learning_rate": 6.1347583595295205e-06, "loss": 0.3483, "step": 1205},
    {"epoch": 3.73, "learning_rate": 5.9956530446566305e-06, "loss": 0.5998, "step": 1210},
    {"epoch": 3.75, "learning_rate": 5.857864376269051e-06, "loss": 0.3786, "step": 1215},
    {"epoch": 3.77, "learning_rate": 5.721405308842023e-06, "loss": 0.3979, "step": 1220},
    {"epoch": 3.78, "learning_rate": 5.586288671845708e-06, "loss": 0.3185, "step": 1225},
    {"epoch": 3.8, "learning_rate": 5.452527168539026e-06, "loss": 0.3171, "step": 1230},
    {"epoch": 3.81, "learning_rate": 5.320133374775296e-06, "loss": 0.4599, "step": 1235},
    {"epoch": 3.83, "learning_rate": 5.189119737819912e-06, "loss": 0.4121, "step": 1240},
    {"epoch": 3.84, "learning_rate": 5.059498575180084e-06, "loss": 0.3233, "step": 1245},
    {"epoch": 3.86, "learning_rate": 4.9312820734467855e-06, "loss": 0.5224, "step": 1250},
    {"epoch": 3.87, "learning_rate": 4.804482287149015e-06, "loss": 0.4146, "step": 1255},
    {"epoch": 3.89, "learning_rate": 4.679111137620442e-06, "loss": 0.3294, "step": 1260},
    {"epoch": 3.9, "learning_rate": 4.555180411878617e-06, "loss": 0.3016, "step": 1265},
    {"epoch": 3.92, "learning_rate": 4.4327017615168e-06, "loss": 0.2814, "step": 1270},
    {"epoch": 3.94, "learning_rate": 4.311686701608486e-06, "loss": 0.4891, "step": 1275},
    {"epoch": 3.95, "learning_rate": 4.1921466096248164e-06, "loss": 0.3668, "step": 1280},
    {"epoch": 3.97, "learning_rate": 4.074092724364889e-06, "loss": 0.3226, "step": 1285},
    {"epoch": 3.98, "learning_rate": 3.957536144899123e-06, "loss": 0.3381, "step": 1290},
    {"epoch": 4.0, "learning_rate": 3.842487829525779e-06, "loss": 0.3175, "step": 1295},
    {"epoch": 4.0, "eval_loss": 0.5760383605957031, "eval_runtime": 15.1722, "eval_samples_per_second": 2.241, "eval_steps_per_second": 2.241, "step": 1296},
    {"epoch": 4.01, "learning_rate": 3.7289585947406504e-06, "loss": 0.3827, "step": 1300},
    {"epoch": 4.03, "learning_rate": 3.616959114220162e-06, "loss": 0.33, "step": 1305},
    {"epoch": 4.04, "learning_rate": 3.5064999178178648e-06, "loss": 0.4641, "step": 1310},
    {"epoch": 4.06, "learning_rate": 3.397591390574424e-06, "loss": 0.2855, "step": 1315},
    {"epoch": 4.07, "learning_rate": 3.290243771741275e-06, "loss": 0.492, "step": 1320},
    {"epoch": 4.09, "learning_rate": 3.1844671538179407e-06, "loss": 0.3964, "step": 1325},
    {"epoch": 4.1, "learning_rate": 3.0802714816031787e-06, "loss": 0.4128, "step": 1330},
    {"epoch": 4.12, "learning_rate": 2.9776665512600054e-06, "loss": 0.5308, "step": 1335},
    {"epoch": 4.14, "learning_rate": 2.876662009394673e-06, "loss": 0.3281, "step": 1340},
    {"epoch": 4.15, "learning_rate": 2.777267352149724e-06, "loss": 0.4674, "step": 1345},
    {"epoch": 4.17, "learning_rate": 2.679491924311226e-06, "loss": 0.3746, "step": 1350},
    {"epoch": 4.18, "learning_rate": 2.5833449184301597e-06, "loss": 0.4369, "step": 1355},
    {"epoch": 4.2, "learning_rate": 2.488835373958185e-06, "loss": 0.3269, "step": 1360},
    {"epoch": 4.21, "learning_rate": 2.3959721763977805e-06, "loss": 0.4396, "step": 1365},
    {"epoch": 4.23, "learning_rate": 2.304764056466844e-06, "loss": 0.3521, "step": 1370},
    {"epoch": 4.24, "learning_rate": 2.2152195892778795e-06, "loss": 0.4001, "step": 1375},
    {"epoch": 4.26, "learning_rate": 2.127347193531757e-06, "loss": 0.3788, "step": 1380},
    {"epoch": 4.27, "learning_rate": 2.0411551307262356e-06, "loss": 0.4339, "step": 1385},
    {"epoch": 4.29, "learning_rate": 1.9566515043792455e-06, "loss": 0.3463, "step": 1390},
    {"epoch": 4.31, "learning_rate": 1.8738442592670014e-06, "loss": 0.4395, "step": 1395},
    {"epoch": 4.32, "learning_rate": 1.792741180677069e-06, "loss": 0.3108, "step": 1400},
    {"epoch": 4.34, "learning_rate": 1.713349893676415e-06, "loss": 0.322, "step": 1405},
    {"epoch": 4.35, "learning_rate": 1.6356778623945223e-06, "loss": 0.3564, "step": 1410},
    {"epoch": 4.37, "learning_rate": 1.5597323893216332e-06, "loss": 0.4093, "step": 1415},
    {"epoch": 4.38, "learning_rate": 1.4855206146221934e-06, "loss": 0.4037, "step": 1420},
    {"epoch": 4.4, "learning_rate": 1.4130495154635494e-06, "loss": 0.3104, "step": 1425},
    {"epoch": 4.41, "learning_rate": 1.3423259053599891e-06, "loss": 0.4232, "step": 1430},
    {"epoch": 4.43, "learning_rate": 1.273356433532138e-06, "loss": 0.3957, "step": 1435},
    {"epoch": 4.44, "learning_rate": 1.2061475842818337e-06, "loss": 0.4254, "step": 1440},
    {"epoch": 4.46, "learning_rate": 1.140705676382483e-06, "loss": 0.3837, "step": 1445},
    {"epoch": 4.48, "learning_rate": 1.0770368624849947e-06, "loss": 0.3663, "step": 1450},
    {"epoch": 4.49, "learning_rate": 1.0151471285393223e-06, "loss": 0.3837, "step": 1455},
    {"epoch": 4.51, "learning_rate": 9.550422932316938e-07, "loss": 0.331, "step": 1460},
    {"epoch": 4.52, "learning_rate": 8.967280074375395e-07, "loss": 0.3507, "step": 1465},
    {"epoch": 4.54, "learning_rate": 8.402097536902221e-07, "loss": 0.2632, "step": 1470},
    {"epoch": 4.55, "learning_rate": 7.854928456655897e-07, "loss": 0.45, "step": 1475},
    {"epoch": 4.57, "learning_rate": 7.325824276823934e-07, "loss": 0.3298, "step": 1480},
    {"epoch": 4.58, "learning_rate": 6.814834742186361e-07, "loss": 0.2389, "step": 1485},
    {"epoch": 4.6, "learning_rate": 6.322007894438842e-07, "loss": 0.3721, "step": 1490},
    {"epoch": 4.61, "learning_rate": 5.847390067676007e-07, "loss": 0.442, "step": 1495},
    {"epoch": 4.63, "learning_rate": 5.391025884035239e-07, "loss": 0.3195, "step": 1500},
    {"epoch": 4.65, "learning_rate": 4.952958249501393e-07, "loss": 0.3131, "step": 1505},
    {"epoch": 4.66, "learning_rate": 4.533228349872887e-07, "loss": 0.4478, "step": 1510},
    {"epoch": 4.68, "learning_rate": 4.1318756468897047e-07, "loss": 0.4988, "step": 1515},
    {"epoch": 4.69, "learning_rate": 3.748937874523062e-07, "loss": 0.3354, "step": 1520},
    {"epoch": 4.71, "learning_rate": 3.3844510354280157e-07, "loss": 0.3146, "step": 1525},
    {"epoch": 4.72, "learning_rate": 3.038449397558396e-07, "loss": 0.3662, "step": 1530},
    {"epoch": 4.74, "learning_rate": 2.7109654909452275e-07, "loss": 0.5044, "step": 1535},
    {"epoch": 4.75, "learning_rate": 2.402030104638198e-07, "loss": 0.3398, "step": 1540},
    {"epoch": 4.77, "learning_rate": 2.111672283811106e-07, "loss": 0.3719, "step": 1545},
    {"epoch": 4.78, "learning_rate": 1.839919327030937e-07, "loss": 0.3409, "step": 1550},
    {"epoch": 4.8, "learning_rate": 1.5867967836915354e-07, "loss": 0.3881, "step": 1555},
    {"epoch": 4.81, "learning_rate": 1.3523284516113955e-07, "loss": 0.3081, "step": 1560},
    {"epoch": 4.83, "learning_rate": 1.1365363747963155e-07, "loss": 0.4667, "step": 1565},
    {"epoch": 4.85, "learning_rate": 9.394408413668343e-08, "loss": 0.4495, "step": 1570},
    {"epoch": 4.86, "learning_rate": 7.61060381650891e-08, "loss": 0.3963, "step": 1575},
    {"epoch": 4.88, "learning_rate": 6.014117664415953e-08, "loss": 0.5192, "step": 1580},
    {"epoch": 4.89, "learning_rate": 4.60510005420467e-08, "loss": 0.4057, "step": 1585},
    {"epoch": 4.91, "learning_rate": 3.383683457463649e-08, "loss": 0.3126, "step": 1590},
    {"epoch": 4.92, "learning_rate": 2.3499827080992744e-08, "loss": 0.3974, "step": 1595},
    {"epoch": 4.94, "learning_rate": 1.5040949915399173e-08, "loss": 0.3476, "step": 1600},
    {"epoch": 4.95, "learning_rate": 8.460998355988014e-09, "loss": 0.3137, "step": 1605},
    {"epoch": 4.97, "learning_rate": 3.760591029973171e-09, "loss": 0.3195, "step": 1610},
    {"epoch": 4.98, "learning_rate": 9.401698554767358e-10, "loss": 0.3808, "step": 1615},
    {"epoch": 5.0, "learning_rate": 0.0, "loss": 0.4407, "step": 1620},
    {"epoch": 5.0, "eval_loss": 0.5766593813896179, "eval_runtime": 15.1796, "eval_samples_per_second": 2.24, "eval_steps_per_second": 2.24, "step": 1620},
    {"epoch": 5.0, "step": 1620, "total_flos": 1.426343690502144e+17, "train_loss": 0.49629129811569495, "train_runtime": 2296.2906, "train_samples_per_second": 0.705, "train_steps_per_second": 0.705}
  ],
  "logging_steps": 5,
  "max_steps": 1620,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "total_flos": 1.426343690502144e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}
|
|